diff --git contrib/src/test/results/clientnegative/serde_regex.q.out contrib/src/test/results/clientnegative/serde_regex.q.out
index 69a5403ccd..95154bd50b 100644
--- contrib/src/test/results/clientnegative/serde_regex.q.out
+++ contrib/src/test/results/clientnegative/serde_regex.q.out
@@ -51,13 +51,13 @@ STAGE PLANS:
   Stage: Stage-0
       Create Table
         columns: host string, identity string, user string, time string, request string, status int, size int, referer string, agent string
-        name: default.serde_regex
         input format: org.apache.hadoop.mapred.TextInputFormat
         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
         serde name: org.apache.hadoop.hive.contrib.serde2.RegexSerDe
         serde properties:
           input.regex ([^ ]*) ([^ ]*) ([^ ]*) (-|\[[^\]]*\]) ([^ "]*|"[^"]*") (-|[0-9]*) (-|[0-9]*)(?: ([^ "]*|"[^"]*") ([^ "]*|"[^"]*"))?
           output.format.string %1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s
+        name: hive.default.serde_regex
 
 PREHOOK: query: CREATE TABLE serde_regex(
   host STRING,
diff --git contrib/src/test/results/clientpositive/fileformat_base64.q.out contrib/src/test/results/clientpositive/fileformat_base64.q.out
index 1006db625e..c2c205a9f9 100644
--- contrib/src/test/results/clientpositive/fileformat_base64.q.out
+++ contrib/src/test/results/clientpositive/fileformat_base64.q.out
@@ -23,9 +23,9 @@ STAGE PLANS:
   Stage: Stage-0
       Create Table
         columns: key int, value string
-        name: default.base64_test
         input format: org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat
         output format: org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat
+        name: hive.default.base64_test
 
 PREHOOK: query: CREATE TABLE base64_test(key INT, value STRING) STORED AS
   INPUTFORMAT 'org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat'
diff --git contrib/src/test/results/clientpositive/serde_regex.q.out contrib/src/test/results/clientpositive/serde_regex.q.out
index 7462568938..9d933073e3 100644
--- contrib/src/test/results/clientpositive/serde_regex.q.out
+++ contrib/src/test/results/clientpositive/serde_regex.q.out
@@ -45,13 +45,13 @@ STAGE PLANS:
   Stage: Stage-0
       Create Table
         columns: host string, identity string, user string, time string, request string, status string, size string, referer string, agent string
-        name: default.serde_regex
         input format: org.apache.hadoop.mapred.TextInputFormat
         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
         serde name: org.apache.hadoop.hive.contrib.serde2.RegexSerDe
         serde properties:
           input.regex ([^ ]*) ([^ ]*) ([^ ]*) (-|\[[^\]]*\]) ([^ "]*|"[^"]*") (-|[0-9]*) (-|[0-9]*)(?: ([^ "]*|"[^"]*") ([^ "]*|"[^"]*"))?
           output.format.string %1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s
+        name: hive.default.serde_regex
 
 PREHOOK: query: CREATE TABLE serde_regex(
   host STRING,
diff --git hbase-handler/src/test/results/negative/cascade_dbdrop.q.out hbase-handler/src/test/results/negative/cascade_dbdrop.q.out
index 9f3d2c01a8..1e930ae9fe 100644
--- hbase-handler/src/test/results/negative/cascade_dbdrop.q.out
+++ hbase-handler/src/test/results/negative/cascade_dbdrop.q.out
@@ -10,14 +10,14 @@ WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")
 TBLPROPERTIES ("hbase.table.name" = "hbase_table_0", "external.table.purge" = "true")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:hbasedb
-PREHOOK: Output: hbaseDB@hbase_table_0
+PREHOOK: Output: hbasedb@hbase_table_0
 POSTHOOK: query: CREATE EXTERNAL TABLE hbaseDB.hbase_table_0(key int, value string)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")
 TBLPROPERTIES ("hbase.table.name" = "hbase_table_0", "external.table.purge" = "true")
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:hbasedb
-POSTHOOK: Output: hbaseDB@hbase_table_0
+POSTHOOK: Output: hbasedb@hbase_table_0
 Found 3 items
 drwxr-xr-x   - ### USER ### ### GROUP ###          0 ### HDFS DATE ### hdfs://### HDFS PATH ###
 drwxr-xr-x   - ### USER ### ### GROUP ###          0 ### HDFS DATE ### hdfs://### HDFS PATH ###
diff --git hbase-handler/src/test/results/positive/drop_database_table_hooks.q.out hbase-handler/src/test/results/positive/drop_database_table_hooks.q.out
index 90713ef840..603ecc5204 100644
--- hbase-handler/src/test/results/positive/drop_database_table_hooks.q.out
+++ hbase-handler/src/test/results/positive/drop_database_table_hooks.q.out
@@ -13,43 +13,43 @@ POSTHOOK: Input: database:sometableshavehook
 PREHOOK: query: CREATE TABLE NOHOOK0 (name string, number int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@NOHOOK0
+PREHOOK: Output: sometableshavehook@nohook0
 POSTHOOK: query: CREATE TABLE NOHOOK0 (name string, number int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@NOHOOK0
+POSTHOOK: Output: sometableshavehook@nohook0
 PREHOOK: query: CREATE TABLE NOHOOK1 (name string, number int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@NOHOOK1
+PREHOOK: Output: sometableshavehook@nohook1
 POSTHOOK: query: CREATE TABLE NOHOOK1 (name string, number int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@NOHOOK1
+POSTHOOK: Output: sometableshavehook@nohook1
 PREHOOK: query: CREATE TABLE NOHOOK2 (name string, number int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@NOHOOK2
+PREHOOK: Output: sometableshavehook@nohook2
 POSTHOOK: query: CREATE TABLE NOHOOK2 (name string, number int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@NOHOOK2
+POSTHOOK: Output: sometableshavehook@nohook2
 PREHOOK: query: CREATE TABLE NOHOOK3 (name string, number int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@NOHOOK3
+PREHOOK: Output: sometableshavehook@nohook3
 POSTHOOK: query: CREATE TABLE NOHOOK3 (name string, number int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@NOHOOK3
+POSTHOOK: Output: sometableshavehook@nohook3
 PREHOOK: query: CREATE TABLE NOHOOK4 (name string, number int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@NOHOOK4
+PREHOOK: Output: sometableshavehook@nohook4
 POSTHOOK: query: CREATE TABLE NOHOOK4 (name string, number int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@NOHOOK4
+POSTHOOK: Output: sometableshavehook@nohook4
 PREHOOK: query: CREATE TABLE HBASEHOOK0 (key int, val binary)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -57,7 +57,7 @@ PREHOOK: query: CREATE TABLE HBASEHOOK0 (key int, val binary)
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@HBASEHOOK0
+PREHOOK: Output: sometableshavehook@hbasehook0
 POSTHOOK: query: CREATE TABLE HBASEHOOK0 (key int, val binary)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -65,7 +65,7 @@ POSTHOOK: query: CREATE TABLE HBASEHOOK0 (key int, val binary)
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@HBASEHOOK0
+POSTHOOK: Output: sometableshavehook@hbasehook0
 PREHOOK: query: CREATE TABLE HBASEHOOK1 (key int, val binary)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -73,7 +73,7 @@ PREHOOK: query: CREATE TABLE HBASEHOOK1 (key int, val binary)
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@HBASEHOOK1
+PREHOOK: Output: sometableshavehook@hbasehook1
 POSTHOOK: query: CREATE TABLE HBASEHOOK1 (key int, val binary)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -81,7 +81,7 @@ POSTHOOK: query: CREATE TABLE HBASEHOOK1 (key int, val binary)
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@HBASEHOOK1
+POSTHOOK: Output: sometableshavehook@hbasehook1
 PREHOOK: query: CREATE TABLE HBASEHOOK2 (key int, val binary)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -89,7 +89,7 @@ PREHOOK: query: CREATE TABLE HBASEHOOK2 (key int, val binary)
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@HBASEHOOK2
+PREHOOK: Output: sometableshavehook@hbasehook2
 POSTHOOK: query: CREATE TABLE HBASEHOOK2 (key int, val binary)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -97,7 +97,7 @@ POSTHOOK: query: CREATE TABLE HBASEHOOK2 (key int, val binary)
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@HBASEHOOK2
+POSTHOOK: Output: sometableshavehook@hbasehook2
 PREHOOK: query: DROP DATABASE sometableshavehook CASCADE
 PREHOOK: type: DROPDATABASE
 PREHOOK: Input: database:sometableshavehook
@@ -142,43 +142,43 @@ POSTHOOK: Input: database:sometableshavehook
 PREHOOK: query: CREATE TABLE NOHOOK0 (name string, number int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@NOHOOK0
+PREHOOK: Output: sometableshavehook@nohook0
 POSTHOOK: query: CREATE TABLE NOHOOK0 (name string, number int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@NOHOOK0
+POSTHOOK: Output: sometableshavehook@nohook0
 PREHOOK: query: CREATE TABLE NOHOOK1 (name string, number int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@NOHOOK1
+PREHOOK: Output: sometableshavehook@nohook1
 POSTHOOK: query: CREATE TABLE NOHOOK1 (name string, number int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@NOHOOK1
+POSTHOOK: Output: sometableshavehook@nohook1
 PREHOOK: query: CREATE TABLE NOHOOK2 (name string, number int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@NOHOOK2
+PREHOOK: Output: sometableshavehook@nohook2
 POSTHOOK: query: CREATE TABLE NOHOOK2 (name string, number int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@NOHOOK2
+POSTHOOK: Output: sometableshavehook@nohook2
 PREHOOK: query: CREATE TABLE NOHOOK3 (name string, number int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@NOHOOK3
+PREHOOK: Output: sometableshavehook@nohook3
 POSTHOOK: query: CREATE TABLE NOHOOK3 (name string, number int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@NOHOOK3
+POSTHOOK: Output: sometableshavehook@nohook3
 PREHOOK: query: CREATE TABLE NOHOOK4 (name string, number int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@NOHOOK4
+PREHOOK: Output: sometableshavehook@nohook4
 POSTHOOK: query: CREATE TABLE NOHOOK4 (name string, number int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@NOHOOK4
+POSTHOOK: Output: sometableshavehook@nohook4
 PREHOOK: query: CREATE TABLE HBASEHOOK0 (key int, val binary)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -186,7 +186,7 @@ PREHOOK: query: CREATE TABLE HBASEHOOK0 (key int, val binary)
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@HBASEHOOK0
+PREHOOK: Output: sometableshavehook@hbasehook0
 POSTHOOK: query: CREATE TABLE HBASEHOOK0 (key int, val binary)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -194,7 +194,7 @@ POSTHOOK: query: CREATE TABLE HBASEHOOK0 (key int, val binary)
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@HBASEHOOK0
+POSTHOOK: Output: sometableshavehook@hbasehook0
 PREHOOK: query: CREATE TABLE HBASEHOOK1 (key int, val binary)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -202,7 +202,7 @@ PREHOOK: query: CREATE TABLE HBASEHOOK1 (key int, val binary)
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@HBASEHOOK1
+PREHOOK: Output: sometableshavehook@hbasehook1
 POSTHOOK: query: CREATE TABLE HBASEHOOK1 (key int, val binary)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -210,7 +210,7 @@ POSTHOOK: query: CREATE TABLE HBASEHOOK1 (key int, val binary)
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@HBASEHOOK1
+POSTHOOK: Output: sometableshavehook@hbasehook1
 PREHOOK: query: CREATE TABLE HBASEHOOK2 (key int, val binary)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -218,7 +218,7 @@ PREHOOK: query: CREATE TABLE HBASEHOOK2 (key int, val binary)
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:sometableshavehook
-PREHOOK: Output: sometableshavehook@HBASEHOOK2
+PREHOOK: Output: sometableshavehook@hbasehook2
 POSTHOOK: query: CREATE TABLE HBASEHOOK2 (key int, val binary)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -226,7 +226,7 @@ POSTHOOK: query: CREATE TABLE HBASEHOOK2 (key int, val binary)
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:sometableshavehook
-POSTHOOK: Output: sometableshavehook@HBASEHOOK2
+POSTHOOK: Output: sometableshavehook@hbasehook2
 PREHOOK: query: DROP DATABASE sometableshavehook CASCADE
 PREHOOK: type: DROPDATABASE
 PREHOOK: Input: database:sometableshavehook
diff --git hbase-handler/src/test/results/positive/hbase_viewjoins.q.out hbase-handler/src/test/results/positive/hbase_viewjoins.q.out
index 9eae99d51a..a827d305f3 100644
--- hbase-handler/src/test/results/positive/hbase_viewjoins.q.out
+++ hbase-handler/src/test/results/positive/hbase_viewjoins.q.out
@@ -33,7 +33,7 @@ TBLPROPERTIES (
 'external.table.purge' = 'true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@HBASE_TABLE_TEST_1
+PREHOOK: Output: default@hbase_table_test_1
 POSTHOOK: query: CREATE EXTERNAL TABLE HBASE_TABLE_TEST_1(
 cvalue string ,
 pk string,
@@ -53,20 +53,20 @@ TBLPROPERTIES (
 'external.table.purge' = 'true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@HBASE_TABLE_TEST_1
+POSTHOOK: Output: default@hbase_table_test_1
 PREHOOK: query: CREATE VIEW VIEW_HBASE_TABLE_TEST_1 AS SELECT hbase_table_test_1.cvalue,hbase_table_test_1.pk,hbase_table_test_1.ccount FROM hbase_table_test_1 WHERE hbase_table_test_1.ccount IS NOT NULL
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@hbase_table_test_1
 PREHOOK: Output: database:default
-PREHOOK: Output: default@VIEW_HBASE_TABLE_TEST_1
+PREHOOK: Output: default@view_hbase_table_test_1
 POSTHOOK: query: CREATE VIEW VIEW_HBASE_TABLE_TEST_1 AS SELECT hbase_table_test_1.cvalue,hbase_table_test_1.pk,hbase_table_test_1.ccount FROM hbase_table_test_1 WHERE hbase_table_test_1.ccount IS NOT NULL
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@hbase_table_test_1
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@VIEW_HBASE_TABLE_TEST_1
-POSTHOOK: Lineage: VIEW_HBASE_TABLE_TEST_1.ccount SIMPLE [(hbase_table_test_1)hbase_table_test_1.FieldSchema(name:ccount, type:int, comment:), ]
-POSTHOOK: Lineage: VIEW_HBASE_TABLE_TEST_1.cvalue SIMPLE [(hbase_table_test_1)hbase_table_test_1.FieldSchema(name:cvalue, type:string, comment:), ]
-POSTHOOK: Lineage: VIEW_HBASE_TABLE_TEST_1.pk SIMPLE [(hbase_table_test_1)hbase_table_test_1.FieldSchema(name:pk, type:string, comment:), ]
+POSTHOOK: Output: default@view_hbase_table_test_1
+POSTHOOK: Lineage: view_hbase_table_test_1.ccount SIMPLE [(hbase_table_test_1)hbase_table_test_1.FieldSchema(name:ccount, type:int, comment:), ]
+POSTHOOK: Lineage: view_hbase_table_test_1.cvalue SIMPLE [(hbase_table_test_1)hbase_table_test_1.FieldSchema(name:cvalue, type:string, comment:), ]
+POSTHOOK: Lineage: view_hbase_table_test_1.pk SIMPLE [(hbase_table_test_1)hbase_table_test_1.FieldSchema(name:pk, type:string, comment:), ]
 PREHOOK: query: CREATE EXTERNAL TABLE HBASE_TABLE_TEST_2(
 cvalue string ,
 pk string ,
@@ -86,7 +86,7 @@ TBLPROPERTIES (
 'external.table.purge' = 'true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@HBASE_TABLE_TEST_2
+PREHOOK: Output: default@hbase_table_test_2
 POSTHOOK: query: CREATE EXTERNAL TABLE HBASE_TABLE_TEST_2(
 cvalue string ,
 pk string ,
@@ -106,24 +106,24 @@ TBLPROPERTIES (
 'external.table.purge' = 'true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@HBASE_TABLE_TEST_2
+POSTHOOK: Output: default@hbase_table_test_2
 PREHOOK: query: CREATE VIEW VIEW_HBASE_TABLE_TEST_2 AS SELECT hbase_table_test_2.cvalue,hbase_table_test_2.pk,hbase_table_test_2.ccount
 FROM hbase_table_test_2
 WHERE hbase_table_test_2.pk >='3-0000h-0' AND hbase_table_test_2.pk <= '3-0000h-g' AND hbase_table_test_2.ccount IS NOT NULL
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@hbase_table_test_2
 PREHOOK: Output: database:default
-PREHOOK: Output: default@VIEW_HBASE_TABLE_TEST_2
+PREHOOK: Output: default@view_hbase_table_test_2
 POSTHOOK: query: CREATE VIEW VIEW_HBASE_TABLE_TEST_2 AS SELECT hbase_table_test_2.cvalue,hbase_table_test_2.pk,hbase_table_test_2.ccount
 FROM hbase_table_test_2
 WHERE hbase_table_test_2.pk >='3-0000h-0' AND hbase_table_test_2.pk <= '3-0000h-g' AND hbase_table_test_2.ccount IS NOT NULL
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@hbase_table_test_2
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@VIEW_HBASE_TABLE_TEST_2
-POSTHOOK: Lineage: VIEW_HBASE_TABLE_TEST_2.ccount SIMPLE [(hbase_table_test_2)hbase_table_test_2.FieldSchema(name:ccount, type:int, comment:), ]
-POSTHOOK: Lineage: VIEW_HBASE_TABLE_TEST_2.cvalue SIMPLE [(hbase_table_test_2)hbase_table_test_2.FieldSchema(name:cvalue, type:string, comment:), ]
-POSTHOOK: Lineage: VIEW_HBASE_TABLE_TEST_2.pk SIMPLE [(hbase_table_test_2)hbase_table_test_2.FieldSchema(name:pk, type:string, comment:), ]
+POSTHOOK: Output: default@view_hbase_table_test_2
+POSTHOOK: Lineage: view_hbase_table_test_2.ccount SIMPLE [(hbase_table_test_2)hbase_table_test_2.FieldSchema(name:ccount, type:int, comment:), ]
+POSTHOOK: Lineage: view_hbase_table_test_2.cvalue SIMPLE [(hbase_table_test_2)hbase_table_test_2.FieldSchema(name:cvalue, type:string, comment:), ]
+POSTHOOK: Lineage: view_hbase_table_test_2.pk SIMPLE [(hbase_table_test_2)hbase_table_test_2.FieldSchema(name:pk, type:string, comment:), ]
 PREHOOK: query: SELECT p.cvalue cvalue
 FROM `VIEW_HBASE_TABLE_TEST_1` `p`
 LEFT OUTER JOIN `VIEW_HBASE_TABLE_TEST_2` `A1`
diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
index 9b66e6be74..fa33c2f013 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
@@ -174,7 +174,7 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context,
     }
 
     try {
-      Table table = context.getHive().newTable(desc.getDbTableName());
+      Table table = new Table(desc.getTableName());
       if (desc.getLocation() != null) {
         table.setDataLocation(new Path(desc.getLocation()));
       }
diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
index cd54e28393..2b1f98a5ba 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
@@ -41,6 +41,7 @@
 import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
@@ -336,7 +337,7 @@ protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive
     } else if (ddlDesc instanceof AlterTableSetLocationDesc) {
       AlterTableSetLocationDesc alterTable = (AlterTableSetLocationDesc)ddlDesc;
       Table table = hive.getTable(SessionState.get().getCurrentDatabase(),
-          Utilities.getDbTableName(alterTable.getDbTableName())[1], false);
+          HiveTableName.of(alterTable.getDbTableName()).getTable(), false);
 
       Partition part = null;
       if (alterTable.getPartitionSpec() != null) {
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java
index 333db4db66..f14e6285cb 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java
@@ -154,6 +154,7 @@ static public Deserializer getDeserializer(Configuration conf,
     ObjectInspector oi = deserializer.getObjectInspector();
     String[] names = tableName.split("\\.");
     String last_name = names[names.length - 1];
 
+    // 0 = db, 1 = table
     for (int i = 2; i < names.length; i++) {
       if (oi instanceof StructObjectInspector) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 517b0cc443..2dcc5b9824 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -340,7 +340,7 @@ private void acquireLocks() throws CommandProcessorException {
           fsd1.getDirName().compareTo(fsd2.getDirName()));
       for (FileSinkDesc desc : acidSinks) {
         TableDesc tableInfo = desc.getTableInfo();
-        final TableName tn = HiveTableName.ofNullable(tableInfo.getTableName());
+        final TableName tn = HiveTableName.of(tableInfo.getTableName());
         long writeId = driverContext.getTxnManager().getTableWriteId(tn.getDb(), tn.getTable());
         desc.setTableWriteId(writeId);
diff --git ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java
index 4b833b730c..b24b1b8012 100644
--- ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java
+++ ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java
@@ -52,6 +52,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.common.metrics.common.Metrics;
 import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
 import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;
@@ -631,13 +632,13 @@ public long getSize() {
     }
   }
 
-  public void notifyTableChanged(String dbName, String tableName, long updateTime) {
-    LOG.debug("Table changed: {}.{}, at {}", dbName, tableName, updateTime);
+  public void notifyTableChanged(TableName tableName, long updateTime) {
+    LOG.debug("Table changed: {}, at {}", tableName, updateTime);
     // Invalidate all cache entries using this table.
     List entriesToInvalidate = null;
     rwLock.writeLock().lock();
     try {
-      String key = (dbName.toLowerCase() + "." + tableName.toLowerCase());
+      String key = (tableName.getNotEmptyDbTable());
       Set entriesForTable = tableToEntryMap.get(key);
       if (entriesForTable != null) {
         // Possible concurrent modification issues if we try to remove cache entries while
@@ -989,7 +990,7 @@ public void accept(NotificationEvent event) {
       QueryResultsCache cache = QueryResultsCache.getInstance();
       if (cache != null) {
         long eventTime = event.getEventTime() * 1000L;
-        cache.notifyTableChanged(dbName, tableName, eventTime);
+        cache.notifyTableChanged(TableName.fromString(tableName, dbName), eventTime);
       } else {
         LOG.debug("Cache not instantiated, skipping event on {}.{}", dbName, tableName);
       }
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckOperation.java
index c05d699bd8..9ca155138e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckOperation.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.ddl.misc.msck;
 
+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 
@@ -33,6 +34,7 @@
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.thrift.TException;
@@ -54,22 +56,21 @@ public int execute() throws HiveException, IOException, TException {
     Msck msck = new Msck(false, false);
     msck.init(context.getDb().getConf());
 
-    String[] names = Utilities.getDbTableName(desc.getTableName());
+    TableName tName = HiveTableName.of(desc.getTableName());
 
     long partitionExpirySeconds = -1L;
     try (HiveMetaStoreClient msc = new HiveMetaStoreClient(context.getConf())) {
-      Table table = msc.getTable(SessionState.get().getCurrentCatalog(), names[0], names[1]);
-      String qualifiedTableName = Warehouse.getCatalogQualifiedTableName(table);
+      Table table = msc.getTable(tName);
       boolean msckEnablePartitionRetention = MetastoreConf.getBoolVar(context.getConf(),
           MetastoreConf.ConfVars.MSCK_REPAIR_ENABLE_PARTITION_RETENTION);
 
       if (msckEnablePartitionRetention) {
         partitionExpirySeconds = PartitionManagementTask.getRetentionPeriodInSeconds(table);
-        LOG.info("{} - Retention period ({}s) for partition is enabled for MSCK REPAIR..", qualifiedTableName,
+        LOG.info("{} - Retention period ({}s) for partition is enabled for MSCK REPAIR..", tName,
            partitionExpirySeconds);
       }
     }
-    MsckInfo msckInfo = new MsckInfo(SessionState.get().getCurrentCatalog(), names[0], names[1],
+    MsckInfo msckInfo = new MsckInfo(tName,
        desc.getPartitionsSpecs(), desc.getResFile(), desc.isRepairPartitions(),
        desc.isAddPartitions(), desc.isDropPartitions(), partitionExpirySeconds);
     return msck.repair(msckInfo);
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java
index 9e9d30f246..e301b80c6d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java
@@ -63,7 +63,8 @@ public AlterTableType getType() {
 
   @Explain(displayName = "table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDbTableName() {
-    return tableName.getNotEmptyDbTable();
+    // TODO: use explain for tablename instead
+    return tableName.toString();
   }
 
   @Explain(displayName = "partition", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
@@ -96,7 +97,7 @@ public EnvironmentContext getEnvironmentContext() {
 
   @Override
   public String getFullTableName() {
-    return tableName.getNotEmptyDbTable();
+    return tableName.toString();
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
index 2ee66e58a0..a0e4bca268 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
@@ -41,6 +41,7 @@
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 /**
@@ -146,8 +147,8 @@ private void finalizeAlterTableWithWriteIdOp(Table table, Table oldTable, List

colNames = Lists.newArrayList(colName.toLowerCase()); - String[] dbTab = Utilities.getDbTableName(desc.getDbTableName()); + TableName tName = HiveTableName.of(desc.getDbTableName()); if (null == part) { if (table.isPartitioned()) { Map tableProps = table.getParameters() == null ? @@ -207,20 +209,20 @@ private void getColumnDataColPathSpecified(Table table, Partition part, List partitions = new ArrayList(); partitions.add(part.getName()); cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer)); - List partitionColStat = context.getDb().getPartitionColumnStatistics(dbTab[0].toLowerCase(), - dbTab[1].toLowerCase(), partitions, colNames, false).get(part.getName()); + List partitionColStat = context.getDb().getPartitionColumnStatistics(tName.getDb(), + tName.getTable(), partitions, colNames, false).get(part.getName()); if (partitionColStat != null) { colStats.addAll(partitionColStat); } @@ -250,13 +252,13 @@ private void getColumnDataForPartitionKeyColumn(Table table, List c } private void getColumnsForNotPartitionKeyColumn(List cols, List colStats, - Deserializer deserializer, List colNames, String[] dbTab, Map tableProps) + Deserializer deserializer, List colNames, TableName tName, Map tableProps) throws HiveException { cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer)); - List parts = context.getDb().getPartitionNames(dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), + List parts = context.getDb().getPartitionNames(tName.getDb(), tName.getTable(), (short) -1); AggrStats aggrStats = context.getDb().getAggrColStatsFor( - dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), colNames, parts, false); + tName.getDb(), tName.getTable(), colNames, parts, false); colStats.addAll(aggrStats.getColStats()); if (parts.size() == aggrStats.getPartsFound()) { StatsSetupConst.setColumnStatsState(tableProps, colNames); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/columnstats/AlterTableUpdateColumnStatistictAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/columnstats/AlterTableUpdateColumnStatistictAnalyzer.java index 0ae0a1ab5f..e64af920c2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/columnstats/AlterTableUpdateColumnStatistictAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/columnstats/AlterTableUpdateColumnStatistictAnalyzer.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork; @@ -56,8 +57,7 @@ protected void analyzeCommand(TableName tableName, Map partition String partitionName = getPartitionName(partitionSpec); String columnType = getColumnType(table, columnName); - ColumnStatsUpdateWork work = new ColumnStatsUpdateWork(partitionName, properties, table.getDbName(), - table.getTableName(), columnName, columnType); + ColumnStatsUpdateWork work = new ColumnStatsUpdateWork(partitionName, properties, HiveTableName.of(table), columnName, columnType); ColumnStatsUpdateTask task = (ColumnStatsUpdateTask) TaskFactory.get(work); // TODO: doesn't look like this path is actually ever exercised. Maybe this needs to be removed. 
addInputsOutputsAlterTable(tableName, partitionSpec, null, AlterTableType.UPDATESTATS, false); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AbstractAlterTableRenameAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AbstractAlterTableRenameAnalyzer.java index eb2280f3e5..e14bf9810b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AbstractAlterTableRenameAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AbstractAlterTableRenameAnalyzer.java @@ -43,7 +43,7 @@ protected void analyzeCommand(TableName tableName, Map partition throws SemanticException { TableName target = getQualifiedTableName((ASTNode) command.getChild(0)); - AlterTableRenameDesc desc = new AlterTableRenameDesc(tableName, null, isView(), target.getNotEmptyDbTable()); + AlterTableRenameDesc desc = new AlterTableRenameDesc(tableName, null, isView(), target); Table table = getTable(tableName.getNotEmptyDbTable(), true); if (AcidUtils.isTransactionalTable(table)) { setAcidDdlDesc(desc); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AlterTableRenameDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AlterTableRenameDesc.java index 5c1ce893a5..071c7f843a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AlterTableRenameDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AlterTableRenameDesc.java @@ -33,17 +33,17 @@ public class AlterTableRenameDesc extends AbstractAlterTableDesc { private static final long serialVersionUID = 1L; - private final String newName; + private final TableName newTableName; - public AlterTableRenameDesc(TableName tableName, ReplicationSpec replicationSpec, boolean expectView, String newName) - throws SemanticException { + public AlterTableRenameDesc(TableName tableName, ReplicationSpec replicationSpec, boolean expectView, + TableName newTableName) throws SemanticException { super(AlterTableType.RENAME, tableName, null, replicationSpec, false, expectView, null); - this.newName = newName; + this.newTableName = newTableName; } @Explain(displayName = "new table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getNewName() { - return newName; + public TableName getNewTableName() { + return newTableName; } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AlterTableRenameOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AlterTableRenameOperation.java index f06776c19b..0afd8dc5be 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AlterTableRenameOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AlterTableRenameOperation.java @@ -37,8 +37,7 @@ public AlterTableRenameOperation(DDLOperationContext context, AlterTableRenameDe @Override public int execute() throws HiveException { - TableName tableName = HiveTableName.of(desc.getDbTableName()); - if (Utils.isBootstrapDumpInProgress(context.getDb(), tableName.getDb())) { + if (Utils.isBootstrapDumpInProgress(context.getDb(), HiveTableName.of(desc.getDbTableName()).getDb())) { LOG.error("DDLTask: Rename Table not allowed as bootstrap dump in progress"); throw new HiveException("Rename Table: Not allowed as bootstrap dump in progress"); } @@ -48,6 +47,6 @@ public int execute() throws HiveException { @Override protected void doAlteration(Table table, Partition partition) throws HiveException { - HiveTableName.setFrom(desc.getNewName(), table); + 
HiveTableName.setFrom(desc.getNewTableName(), table); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/touch/AlterTableTouchOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/touch/AlterTableTouchOperation.java index a58bc5a8fe..27df56072b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/touch/AlterTableTouchOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/touch/AlterTableTouchOperation.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.HiveTableName; /** * Operation process of touching a table. @@ -55,7 +56,7 @@ public int execute() throws HiveException { throw new HiveException("Specified partition does not exist"); } try { - context.getDb().alterPartition(table.getCatalogName(), table.getDbName(), table.getTableName(), part, + context.getDb().alterPartition(HiveTableName.of(table), part, environmentContext, true); } catch (InvalidOperationException e) { throw new HiveException(e); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/create/CreateViewDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/create/CreateViewDesc.java index d1f36945fb..2b0de09ba8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/view/create/CreateViewDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/create/CreateViewDesc.java @@ -26,6 +26,7 @@ import org.apache.commons.collections.CollectionUtils; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.Constants; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.TableType; @@ -36,6 +37,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler; import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -363,11 +365,9 @@ public ReplicationSpec getReplicationSpec() { } public Table toTable(HiveConf conf) throws HiveException { - String[] names = Utilities.getDbTableName(getViewName()); - String databaseName = names[0]; - String tableName = names[1]; + TableName tableName = HiveTableName.of(getViewName()); - Table tbl = new Table(databaseName, tableName); + Table tbl = new Table(tableName); tbl.setViewOriginalText(getViewOriginalText()); tbl.setViewExpandedText(getViewExpandedText()); if (isMaterialized()) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java index dc6d31a9cb..9c5f3fa460 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java @@ -84,8 +84,7 @@ private ColumnStatistics constructColumnStatsFromInput() colStats.getStatsDesc().getTableName()); return colStats; } - String dbName = work.dbName(); - String tableName = work.getTableName(); + TableName tableName = work.getTableName(); String partName = work.getPartName(); String colName = work.getColName(); String columnType = work.getColType(); @@ -291,7 +290,7 @@ private ColumnStatistics constructColumnStatsFromInput() } else { throw 
new SemanticException("Unsupported type"); } - ColumnStatisticsDesc statsDesc = getColumnStatsDesc(dbName, tableName, + ColumnStatisticsDesc statsDesc = getColumnStatsDesc(tableName, partName, partName == null); ColumnStatistics colStat = new ColumnStatistics(); colStat.setStatsDesc(statsDesc); @@ -300,11 +299,10 @@ private ColumnStatistics constructColumnStatsFromInput() return colStat; } - private ColumnStatisticsDesc getColumnStatsDesc(String dbName, - String tableName, String partName, boolean isTblLevel) { + private ColumnStatisticsDesc getColumnStatsDesc(TableName tableName, String partName, boolean isTblLevel) { ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc(); - statsDesc.setDbName(dbName); - statsDesc.setTableName(tableName); + statsDesc.setDbName(tableName.getDb()); + statsDesc.setTableName(tableName.getTable()); statsDesc.setIsTblLevel(isTblLevel); if (!isTblLevel) { statsDesc.setPartName(partName); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java index c1f94d165b..faad908b33 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java @@ -42,6 +42,7 @@ import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.jsonexplain.JsonParser; import org.apache.hadoop.hive.common.jsonexplain.JsonParserFactory; import org.apache.hadoop.hive.conf.HiveConf; @@ -744,7 +745,7 @@ private JSONArray outputList(List l, PrintStream out, boolean hasHeader, private boolean isPrintable(Object val) { if (val instanceof Boolean || val instanceof String || val instanceof Integer || val instanceof Long || val instanceof Byte - || val instanceof Float || val instanceof Double || val instanceof Path) { + || val instanceof Float || val instanceof Double || val instanceof Path || val instanceof TableName) { return true; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java index d121a21f62..5476b25bf3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConfUtil; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -70,6 +71,7 @@ import org.apache.hadoop.hive.ql.plan.PlanUtils; import org.apache.hadoop.hive.ql.plan.SkewedColumnPositionPair; import org.apache.hadoop.hive.ql.plan.api.OperatorType; +import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.stats.StatsCollectionContext; import org.apache.hadoop.hive.ql.stats.StatsPublisher; import org.apache.hadoop.hive.serde2.*; @@ -1581,7 +1583,8 @@ private void publishStats() throws HiveException { // key = "database.table/SP/DP/"LB/ // Hive store lowercase table name in metastore, and Counters is character case sensitive, so we // use lowercase table name as prefix here, as StatsTask get table name from metastore to fetch counter. 
- String prefix = conf.getTableInfo().getTableName().toLowerCase(); + String prefix = + TableName.fromString(conf.getTableInfo().getTableName(), conf.getTableInfo().getCatName(), conf.getTableInfo().getDbName()).toString(); prefix = Utilities.join(prefix, spSpec, dpSpec); prefix = prefix.endsWith(Path.SEPARATOR) ? prefix : prefix + Path.SEPARATOR; if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java index 51de87f2fd..dcb5d04e21 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java @@ -51,6 +51,7 @@ import org.apache.hadoop.hive.ql.optimizer.physical.BucketingSortingCtx.BucketCol; import org.apache.hadoop.hive.ql.optimizer.physical.BucketingSortingCtx.SortCol; import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.AnalyzeState; +import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; import org.apache.hadoop.hive.ql.plan.LoadFileDesc; import org.apache.hadoop.hive.ql.plan.LoadMultiFilesDesc; @@ -833,8 +834,7 @@ private void updatePartitionBucketSortColumns(Hive db, Table table, Partition pa } if (updateBucketCols || updateSortCols) { - db.alterPartition(table.getCatalogName(), table.getDbName(), table.getTableName(), - partn, null, true); + db.alterPartition(HiveTableName.of(table), partn, null, true); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index fcf206044e..129b600958 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -789,6 +789,7 @@ public static ArrayList makeList(Object... olist) { public static TableDesc getTableDesc(Table tbl) { Properties props = tbl.getMetadata(); props.put(serdeConstants.SERIALIZATION_LIB, tbl.getDeserializer().getClass().getName()); + props.put(TableDesc.META_TABLE_CAT_NAME, SessionState.get().getCurrentCatalog()); return (new TableDesc(tbl.getInputFormatClass(), tbl .getOutputFormatClass(), props)); } @@ -2305,43 +2306,6 @@ public static String formatBinaryString(byte[] array, int start, int length) { return names; } - /** - * Extract db and table name from dbtable string, where db and table are separated by "." - * If there is no db name part, set the current sessions default db - * @param dbtable - * @return String array with two elements, first is db name, second is table name - * @throws SemanticException - * @deprecated use {@link TableName} or {@link org.apache.hadoop.hive.ql.parse.HiveTableName} instead - */ - @Deprecated - public static String[] getDbTableName(String dbtable) throws SemanticException { - return getDbTableName(SessionState.get().getCurrentDatabase(), dbtable); - } - - /** - * Extract db and table name from dbtable string. 
- * @param defaultDb - * @param dbtable - * @return String array with two elements, first is db name, second is table name - * @throws SemanticException - * @deprecated use {@link TableName} or {@link org.apache.hadoop.hive.ql.parse.HiveTableName} instead - */ - @Deprecated - public static String[] getDbTableName(String defaultDb, String dbtable) throws SemanticException { - if (dbtable == null) { - return new String[2]; - } - String[] names = dbtable.split("\\."); - switch (names.length) { - case 2: - return names; - case 1: - return new String [] {defaultDb, dbtable}; - default: - throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME, dbtable); - } - } - public static void validateColumnNames(List colNames, List checkCols) throws SemanticException { Iterator checkColsIter = checkCols.iterator(); @@ -2362,44 +2326,6 @@ public static void validateColumnNames(List colNames, List check } } - /** - * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a - * {@link TableName}. All parts can be null. - * - * @param dbTableName - * @return a {@link TableName} - * @throws SemanticException - * @deprecated handle null values and use {@link TableName#fromString(String, String, String)} - */ - @Deprecated - public static TableName getNullableTableName(String dbTableName) throws SemanticException { - return getNullableTableName(dbTableName, SessionState.get().getCurrentDatabase()); - } - - /** - * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a - * {@link TableName}. All parts can be null. - * - * @param dbTableName - * @param defaultDb - * @return a {@link TableName} - * @throws SemanticException - * @deprecated handle null values and use {@link TableName#fromString(String, String, String)} - */ - @Deprecated - public static TableName getNullableTableName(String dbTableName, String defaultDb) throws SemanticException { - if (dbTableName == null) { - return new TableName(null, null, null); - } else { - try { - return TableName - .fromString(dbTableName, SessionState.get().getCurrentCatalog(), defaultDb); - } catch (IllegalArgumentException e) { - throw new SemanticException(e.getCause()); - } - } - } - /** * Gets the default notification interval to send progress updates to the tracker. Useful for * operators that may not output data for a while. diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java index b578d48ce1..3096bbba1a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java @@ -53,7 +53,6 @@ import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -301,7 +300,7 @@ a database ( directory ) throws SemanticException { Table table = new Table(metaData.getTable()); String dbName = dbNameToLoadIn == null ? 
table.getDbName() : dbNameToLoadIn; - TableName tableName = HiveTableName.ofNullable(table.getTableName(), dbName); + TableName tableName = TableName.fromString(table.getTableName(), dbName); String dbDotView = tableName.getNotEmptyDbTable(); CreateViewDesc desc = new CreateViewDesc(dbDotView, table.getAllCols(), null, table.getParameters(), table.getPartColNames(), false, false, false, table.getSd().getInputFormat(), table.getSd().getOutputFormat(), diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java index 7e844d3164..d708a81927 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java @@ -250,7 +250,7 @@ private boolean shouldReplayEvent(FileStatus dir, DumpType dumpType, String dbNa HashMap mapProp = new HashMap<>(); mapProp.put(ReplicationSpec.KEY.CURR_STATE_ID.toString(), replState); - TableName tName = TableName.fromString(tableName, null, dbName); + TableName tName = TableName.fromString(tableName, dbName); AlterTableSetPropertiesDesc alterTblDesc = new AlterTableSetPropertiesDesc(tName, partSpec, new ReplicationSpec(replState, replState), false, mapProp, false, false, null); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java index 939cbc3a35..fa50e20910 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java @@ -146,7 +146,7 @@ HashMap mapProp = new HashMap<>(); mapProp.put(REPL_CHECKPOINT_KEY, dumpRoot); - final TableName tName = TableName.fromString(tableDesc.getTableName(), null, tableDesc.getDatabaseName()); + final TableName tName = TableName.fromString(tableDesc.getTableName(), tableDesc.getDatabaseName()); AlterTableSetPropertiesDesc alterTblDesc = new AlterTableSetPropertiesDesc(tName, partSpec, null, false, mapProp, false, false, null); return TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterTblDesc), conf); diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java index e7d8e55695..42615a6190 100644 --- ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java +++ ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java @@ -24,6 +24,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.HiveTableName; /** * Implementation of a pre execute hook that updates the access @@ -76,7 +77,7 @@ public void run(HookContext hookContext) throws Exception { Table t = db.getTable(dbName, tblName); p = db.getPartition(t, p.getSpec(), false); p.setLastAccessTime(lastAccessTime); - db.alterPartition(null, dbName, tblName, p, null, false); + db.alterPartition(HiveTableName.of(t), p, null, false); t.setLastAccessTime(lastAccessTime); db.alterTable(dbName + "." 
+ tblName, t, false, null, false); break; diff --git ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java index 77878ca40b..b0d3602549 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java @@ -2445,7 +2445,7 @@ public static ValidWriteIdList getTableValidWriteIdListWithTxnList( } public static String getFullTableName(String dbName, String tableName) { - return TableName.fromString(tableName, null, dbName).getNotEmptyDbTable().toLowerCase(); + return TableName.fromString(tableName, dbName).getNotEmptyDbTable(); } /** diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 1f9fb3b897..c2fdca73b3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -192,6 +192,7 @@ import org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.views.HiveAugmentMaterializationRule; import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils; +import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; import org.apache.hadoop.hive.ql.plan.LoadTableDesc.LoadFileType; @@ -733,37 +734,28 @@ public void alterTable(Table newTbl, boolean cascade, EnvironmentContext environ newTbl.getTableName(), newTbl, cascade, environmentContext, transactional); } - /** - * Updates the existing table metadata with the new metadata. - * - * @param fullyQlfdTblName - * name of the existing table - * @param newTbl - * new name of the table. could be the old name - * @param transactional - * Need to generate and save a table snapshot into the metastore? 
- * @throws HiveException - */ - public void alterTable(String fullyQlfdTblName, Table newTbl, EnvironmentContext environmentContext, + public void alterTable(TableName tableName, Table newTbl, EnvironmentContext environmentContext, boolean transactional) throws HiveException { - String[] names = Utilities.getDbTableName(fullyQlfdTblName); - alterTable(null, names[0], names[1], newTbl, false, environmentContext, transactional); + alterTable(tableName.getCat(), tableName.getDb(), tableName.getTable(), newTbl, false, environmentContext, transactional); } - public void alterTable(String fullyQlfdTblName, Table newTbl, boolean cascade, + public void alterTable(TableName tableName, Table newTbl, boolean cascade, EnvironmentContext environmentContext, boolean transactional) throws HiveException { - String[] names = Utilities.getDbTableName(fullyQlfdTblName); - alterTable(null, names[0], names[1], newTbl, cascade, environmentContext, transactional); + alterTable(tableName.getCat(), tableName.getDb(), tableName.getTable(), newTbl, cascade, environmentContext, transactional); } public void alterTable(String fullyQlfdTblName, Table newTbl, boolean cascade, - EnvironmentContext environmentContext, boolean transactional, long writeId) - throws HiveException { - String[] names = Utilities.getDbTableName(fullyQlfdTblName); - alterTable(null, names[0], names[1], newTbl, cascade, environmentContext, transactional, - writeId); + EnvironmentContext environmentContext, boolean transactional) + throws HiveException { + TableName tableName = HiveTableName.of(fullyQlfdTblName); + alterTable(tableName, newTbl, cascade, environmentContext, transactional); + } + + public void alterTable(TableName tableName, Table newTbl, boolean cascade, + EnvironmentContext environmentContext, boolean transactional, long writeId) throws HiveException { + alterTable(tableName.getCat(), tableName.getDb(), tableName.getTable(), newTbl, cascade, environmentContext, transactional, writeId); } public void alterTable(String catName, String dbName, String tblName, Table newTbl, boolean cascade, @@ -850,16 +842,13 @@ public void updateCreationMetadata(String dbName, String tableName, CreationMeta public void alterPartition(String tblName, Partition newPart, EnvironmentContext environmentContext, boolean transactional) throws InvalidOperationException, HiveException { - String[] names = Utilities.getDbTableName(tblName); - alterPartition(null, names[0], names[1], newPart, environmentContext, transactional); + alterPartition(HiveTableName.of(tblName), newPart, environmentContext, transactional); } /** * Updates the existing partition metadata with the new metadata. 
* - * @param dbName - * name of the exiting table's database - * @param tblName + * @param tableName * name of the existing table * @param newPart * new partition @@ -871,12 +860,12 @@ public void alterPartition(String tblName, Partition newPart, * if the changes in metadata is not acceptable * @throws HiveException */ - public void alterPartition(String catName, String dbName, String tblName, Partition newPart, + public void alterPartition(TableName tableName, Partition newPart, EnvironmentContext environmentContext, boolean transactional) throws InvalidOperationException, HiveException { try { - if (catName == null) { - catName = getDefaultCatalog(conf); + if (tableName.getCat() == null) { + tableName = HiveTableName.of(tableName.toString()); } validatePartition(newPart); String location = newPart.getLocation(); @@ -893,11 +882,11 @@ public void alterPartition(String catName, String dbName, String tblName, Partit if (tableSnapshot != null) { newPart.getTPartition().setWriteId(tableSnapshot.getWriteId()); } else { - LOG.warn("Cannot get a table snapshot for " + tblName); + LOG.warn("Cannot get a table snapshot for " + tableName.getTable()); } } - getSynchronizedMSC().alter_partition(catName, - dbName, tblName, newPart.getTPartition(), environmentContext, + getSynchronizedMSC().alter_partition(tableName.getCat(), + tableName.getDb(), tableName.getTable(), newPart.getTPartition(), environmentContext, tableSnapshot == null ? null : tableSnapshot.getValidWriteIdList()); } catch (MetaException e) { @@ -928,10 +917,9 @@ private void validatePartition(Partition newPart) throws HiveException { * if the changes in metadata is not acceptable * @throws HiveException */ - public void alterPartitions(String tblName, List newParts, + public void alterPartitions(TableName tblName, List newParts, EnvironmentContext environmentContext, boolean transactional) throws InvalidOperationException, HiveException { - String[] names = Utilities.getDbTableName(tblName); List newTParts = new ArrayList(); try { @@ -951,7 +939,7 @@ public void alterPartitions(String tblName, List newParts, } newTParts.add(tmpPart.getTPartition()); } - getMSC().alter_partitions(names[0], names[1], newTParts, environmentContext, + getMSC().alter_partitions(tblName.getDb(), tblName.getTable(), newTParts, environmentContext, tableSnapshot != null ? tableSnapshot.getValidWriteIdList() : null, tableSnapshot != null ? 
tableSnapshot.getWriteId() : -1); } catch (MetaException e) { @@ -1166,8 +1154,8 @@ public void createTable(Table tbl, boolean ifNotExists) throws HiveException { * thrown if the drop fails */ public void dropTable(String tableName, boolean ifPurge) throws HiveException { - String[] names = Utilities.getDbTableName(tableName); - dropTable(names[0], names[1], true, true, ifPurge); + TableName tn = HiveTableName.of(tableName); + dropTable(tn.getDb(), tn.getTable(), true, true, ifPurge); } /** @@ -1301,8 +1289,7 @@ public Table getTable(final String tableName) throws HiveException { * table doesn't exist */ public Table getTable(final String tableName, boolean throwException) throws HiveException { - String[] names = Utilities.getDbTableName(tableName); - return this.getTable(names[0], names[1], throwException); + return this.getTable(HiveTableName.of(tableName), throwException); } /** @@ -1317,13 +1304,7 @@ public Table getTable(final String tableName, boolean throwException) throws Hiv * if there's an internal error or if the table doesn't exist */ public Table getTable(final String dbName, final String tableName) throws HiveException { - // TODO: catalog... etc everywhere - if (tableName.contains(".")) { - String[] names = Utilities.getDbTableName(tableName); - return this.getTable(names[0], names[1], true); - } else { - return this.getTable(dbName, tableName, true); - } + return this.getTable(TableName.fromString(tableName, dbName)); } /** @@ -1336,8 +1317,22 @@ public Table getTable(final String dbName, final String tableName) throws HiveEx * if there's an internal error or if the table doesn't exist */ public Table getTable(TableName tableName) throws HiveException { - return this.getTable(ObjectUtils.firstNonNull(tableName.getDb(), SessionState.get().getCurrentDatabase()), - tableName.getTable(), true); + return getTable(tableName, true); + } + + /** + * Returns metadata of the table + * + * @param tableName + * the tableName object + * @param throwException + * controls whether an exception is thrown or null is returned + * @return the table + * @exception HiveException + * if there's an internal error or if the table doesn't exist + */ + public Table getTable(TableName tableName, boolean throwException) throws HiveException { + return this.getTable(tableName.getDb(), tableName.getTable(), throwException); } /** @@ -3355,7 +3350,7 @@ private void alterPartitionSpec(Table tbl, String partPath) throws HiveException, InvalidOperationException { alterPartitionSpecInMemory(tbl, partSpec, tpart, inheritTableSpecs, partPath); - alterPartition(tbl.getCatalogName(), tbl.getDbName(), tbl.getTableName(), + alterPartition(HiveTableName.of(tbl), new Partition(tbl, tpart), null, true); } @@ -3557,11 +3552,6 @@ public boolean dropPartition(String dbName, String tableName, List parti } } - public List getPartitionNames(String tblName, short max) throws HiveException { - String[] names = Utilities.getDbTableName(tblName); - return getPartitionNames(names[0], names[1], max); - } - public List getPartitionNames(String dbName, String tblName, short max) throws HiveException { List names = null; @@ -5235,8 +5225,7 @@ public boolean deletePartitionColumnStatistics(String dbName, String tableName, } public Table newTable(String tableName) throws HiveException { - String[] names = Utilities.getDbTableName(tableName); - return new Table(names[0], names[1]); + return new Table(HiveTableName.of(tableName)); } public String getDelegationToken(String owner, String renewer) diff --git
ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java index 3dcf876af3..811f5ee4e0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java @@ -40,6 +40,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.io.HdfsUtils; import org.apache.hadoop.hive.metastore.HiveMetaHookLoader; @@ -233,6 +234,11 @@ public void truncateTable(String dbName, String tableName, } } + @Override + public org.apache.hadoop.hive.metastore.api.Table getTable(TableName tableName) throws TException { + return getTable(tableName.getCat(), tableName.getDb(), tableName.getTable()); + } + @Override public List getAllTables(String dbName) throws MetaException { List tableNames = super.getAllTables(dbName); diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java index 6d5653690b..b73c08ef28 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java @@ -37,6 +37,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils; @@ -158,6 +159,11 @@ public Table(String databaseName, String tableName) { this(getEmptyTable(databaseName, tableName)); } + public Table(TableName tableName) { + this(getEmptyTable(tableName.getDb(), tableName.getTable())); + this.setCatName(tableName.getCat()); + } + /** This api is used by getMetaData which require deep copy of metastore.api.table * and constraints copy */ @@ -785,6 +791,17 @@ public void setFields(List fields) { tTable.getSd().setCols(fields); } + /** + * Set the table metadata based on a {@link TableName} object. 
+ * @param tableName the tableName object + * @return this + */ + public Table setFrom(TableName tableName) { + this.setDbName(tableName.getDb()); + this.setTableName(tableName.getTable()); + return this; + } + public void setNumBuckets(int nb) { tTable.getSd().setNumBuckets(nb); } @@ -879,6 +896,10 @@ public void setDbName(String databaseName) { tTable.setDbName(databaseName); } + public void setCatName(String catName){ + tTable.setCatName(catName); + } + public List getPartitionKeys() { return tTable.getPartitionKeys(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java index 9fc0416edb..59e13ed4df 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java @@ -20,6 +20,7 @@ import java.util.HashMap; import java.util.Map; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.Table; @@ -47,9 +48,8 @@ public static boolean isPartitionLevelStats(ASTNode tree) { public static Table getTable(ASTNode tree, BaseSemanticAnalyzer sa) throws SemanticException { String tableName = ColumnStatsSemanticAnalyzer.getUnescapedName((ASTNode) tree.getChild(0).getChild(0)); - String currentDb = SessionState.get().getCurrentDatabase(); - String [] names = Utilities.getDbTableName(currentDb, tableName); - return sa.getTable(names[0], names[1], true); + TableName tName = HiveTableName.of(tableName); + return sa.getTable(tName); } public static Map getPartKeyValuePairsFromAST(Table tbl, ASTNode tree, diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index 4f1e23d7a6..74da4f51b8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -399,7 +399,6 @@ public static String getUnescapedName(ASTNode tableOrColumnNode, String currentD // table node Map.Entry dbTablePair = getDbTableNamePair(tableOrColumnNode); return TableName.fromString(dbTablePair.getValue(), - null, dbTablePair.getKey() == null ? 
currentDatabase : dbTablePair.getKey()) .getNotEmptyDbTable(); } else if (tokenType == HiveParser.StringLiteral) { @@ -440,14 +439,14 @@ public static TableName getQualifiedTableName(ASTNode tabNameNode, String catalo throw new SemanticException(ASTErrorUtils.getMsg( ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), tabNameNode)); } - return HiveTableName.ofNullable(tableName, dbName); + return TableName.fromString(tableName, dbName); } final String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText()); if (tableName.contains(".")) { throw new SemanticException(ASTErrorUtils.getMsg( ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), tabNameNode)); } - return HiveTableName.ofNullable(tableName); + return HiveTableName.of(tableName); } /** @@ -1078,7 +1077,7 @@ public TableSpec(Hive db, HiveConf conf, ASTNode ast, boolean allowDynamicPartit try { // get table metadata - tableName = HiveTableName.withNoDefault(getUnescapedName((ASTNode)ast.getChild(0))); + tableName = HiveTableName.of(getUnescapedName((ASTNode)ast.getChild(0))); boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE); if (testMode) { tableName = TableName.fromString(String.join("", conf.getVar(HiveConf.ConfVars.HIVETESTMODEPREFIX), @@ -1715,14 +1714,14 @@ protected Table getTable(String database, String tblName, boolean throwException : db.getTable(database, tblName, false); } catch (InvalidTableException e) { - throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, null, database).getNotEmptyDbTable()), e); + throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, database).getNotEmptyDbTable()), e); } catch (Exception e) { throw new SemanticException(e.getMessage(), e); } if (tab == null && throwException) { // getTable needs a refactor with all ~50 occurences - throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, null, database).getNotEmptyDbTable())); + throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, database).getNotEmptyDbTable())); } return tab; } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java index 7b2e201e5a..7c56c5acef 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java @@ -132,6 +132,7 @@ import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Pair; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.Constants; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -5321,18 +5322,16 @@ public RelNode getSrcRel() { @Override protected Table getTableObjectByName(String tabName, boolean throwException) throws HiveException { - String[] names = Utilities.getDbTableName(tabName); - final String tableName = names[1]; - final String dbName = names[0]; - final String fullyQualName = dbName + "." 
+ tableName; - if (!tabNameToTabObject.containsKey(fullyQualName)) { - Table table = db.getTable(dbName, tableName, throwException); + // tabNameToTabObject is populated by org.apache.hadoop.hive.ql.metadata.Table, which doesn't yet use cats + final TableName tName = TableName.fromString(tabName, SessionState.get().getCurrentDatabase()); + if (!tabNameToTabObject.containsKey(tName.toString())) { + Table table = db.getTable(tName, throwException); if (table != null) { - tabNameToTabObject.put(fullyQualName, table); + tabNameToTabObject.put(tName.toString(), table); } return table; } - return tabNameToTabObject.get(fullyQualName); + return tabNameToTabObject.get(tName.toString()); } RexNode genRexNode(ASTNode expr, RowResolver input, diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java index cd9f88c53b..6da876a250 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java @@ -38,37 +38,22 @@ public HiveTableName(String catName, String dbName, String tableName) { * @throws SemanticException */ public static TableName of(Table table) throws SemanticException { - return ofNullable(table.getTableName(), table.getDbName()); + return ofNullable(table.getTableName(), table.getDbName()); // FIXME: this shouldn't call nullable } /** - * Set a @{@link Table} object's table and db names based on the provided string. - * @param dbTable the dbtable string + * Set a @{@link Table} object's table and db names based on the provided tableName object. + * @param tableName the tableName object * @param table the table to update * @return the table * @throws SemanticException */ - public static Table setFrom(String dbTable, Table table) throws SemanticException{ - TableName name = ofNullable(dbTable); - table.setTableName(name.getTable()); - table.setDbName(name.getDb()); + public static Table setFrom(TableName tableName, Table table) throws SemanticException{ + table.setTableName(tableName.getTable()); + table.setDbName(tableName.getDb()); return table; } - /** - * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a - * {@link TableName}. All parts can be null. - * - * @param dbTableName - * @return a {@link TableName} - * @throws SemanticException - * @deprecated use {@link #of(String)} or {@link #fromString(String, String, String)} - */ - // to be @Deprecated - public static TableName ofNullable(String dbTableName) throws SemanticException { - return ofNullable(dbTableName, SessionState.get().getCurrentDatabase()); - } - /** * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a * {@link TableName}. All parts can be null. This method won't try to find the default db based on the session state. 
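A minimal sketch, not part of the patch, of the two resolution paths that remain once ofNullable is hidden: HiveTableName.of(...) fills missing parts from the session's current catalog and database, while TableName.fromString(...) uses only the defaults it is explicitly handed. The names "orders" and "sales" are assumed, as is an active SessionState.

    // HiveTableName.of(...) consults SessionState for missing catalog/db parts;
    // TableName.fromString(...) takes exactly the defaults passed in.
    TableName fromSession = HiveTableName.of("orders");            // e.g. hive.default.orders
    TableName explicit = TableName.fromString("orders", "sales");  // db fixed to "sales"
    String key = explicit.getNotEmptyDbTable();                    // "sales.orders"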
@@ -93,37 +78,21 @@ public static TableName ofNullableWithNoDefault(String dbTableName) throws Seman * @throws SemanticException * @deprecated use {@link #of(String)} or {@link #fromString(String, String, String)} */ - // to be @Deprecated - public static TableName ofNullable(String dbTableName, String defaultDb) throws SemanticException { + @Deprecated + private static TableName ofNullable(String dbTableName, String defaultDb) throws SemanticException { // TODO: remove if (dbTableName == null) { return new TableName(null, null, null); } else { try { - return fromString(dbTableName, SessionState.get().getCurrentCatalog(), defaultDb); + // if a db is null, so should the catalog be. A workaround, while ofNullable exists at all + final String cat = defaultDb == null || defaultDb.trim().isEmpty() ? null : SessionState.get().getCurrentCatalog(); + return fromString(dbTableName, cat, defaultDb); } catch (IllegalArgumentException e) { throw new SemanticException(e); } } } - /** - * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a - * {@link TableName}. This method won't try to find the default db/catalog based on the session state. - * - * @param dbTableName not null - * @return a {@link TableName} - * @throws SemanticException if dbTableName is null - * @deprecated use {@link #of(String)} instead and use the default db/catalog. - */ - // to be @Deprecated - public static TableName withNoDefault(String dbTableName) throws SemanticException { - try { - return fromString(dbTableName, null, null); - } catch (IllegalArgumentException e) { - throw new SemanticException(e); - } - } - /** * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a * {@link TableName}. diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java index c21c6f15b6..e5551dfd60 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java @@ -323,7 +323,7 @@ public static boolean prepareImport(boolean isImportCmd, } if (StringUtils.isNotBlank(parsedTableName)) { - tblDesc.setTableName(TableName.fromString(parsedTableName, null, dbname)); + tblDesc.setTableName(TableName.fromString(parsedTableName, dbname)); } if (tblDesc.getTableName() == null) { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 0de3730351..74cfa19a1b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -2307,9 +2307,9 @@ private void getMetaData(QB qb, ReadEntity parentInput) // Whether we are using an acid compliant transaction manager has already been caught in // UpdateDeleteSemanticAnalyzer, so if we are updating or deleting and getting nonAcid // here, it means the table itself doesn't support it. 
- throw new SemanticException(ErrorMsg.ACID_OP_ON_NONACID_TABLE, ts.getTableName().getTable()); + throw new SemanticException(ErrorMsg.ACID_OP_ON_NONACID_TABLE, ts.getTableName().toString()); } else { - throw new SemanticException(ErrorMsg.ACID_OP_ON_INSERTONLYTRAN_TABLE, ts.getTableName().getTable()); + throw new SemanticException(ErrorMsg.ACID_OP_ON_INSERTONLYTRAN_TABLE, ts.getTableName().toString()); } } // TableSpec ts is got from the query (user specified), @@ -2350,14 +2350,13 @@ private void getMetaData(QB qb, ReadEntity parentInput) location = new Path(qb.getTableDesc().getLocation()); } else { // allocate a temporary output dir on the location of the table - String tableName = getUnescapedName((ASTNode) ast.getChild(0)); - String[] names = Utilities.getDbTableName(tableName); + TableName tableName = HiveTableName.of(getUnescapedName((ASTNode) ast.getChild(0))); try { Warehouse wh = new Warehouse(conf); //Use destination table's db location. String destTableDb = qb.getTableDesc() != null ? qb.getTableDesc().getDatabaseName() : null; if (destTableDb == null) { - destTableDb = names[0]; + destTableDb = tableName.getDb(); } location = wh.getDatabasePath(db.getDatabase(destTableDb)); } catch (MetaException e) { @@ -7105,7 +7104,7 @@ private Operator genMaterializedViewDataOrgPlan(List sortColInfos, L } private void setStatsForNonNativeTable(String dbName, String tableName) throws SemanticException { - TableName qTableName = HiveTableName.ofNullable(tableName, dbName); + TableName qTableName = TableName.fromString(tableName, dbName); Map mapProp = new HashMap<>(); mapProp.put(StatsSetupConst.COLUMN_STATS_ACCURATE, null); AlterTableUnsetPropertiesDesc alterTblDesc = new AlterTableUnsetPropertiesDesc(qTableName, null, null, false, @@ -7687,7 +7686,7 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input) fileSinkColInfos = new ArrayList<>(); destTableIsTemporary = tblDesc.isTemporary(); destTableIsMaterialization = tblDesc.isMaterialization(); - tableName = TableName.fromString(tblDesc.getDbTableName(), null, tblDesc.getDatabaseName()); + tableName = tblDesc.getTableName(); tblProps = tblDesc.getTblProps(); } else if (viewDesc != null) { fieldSchemas = new ArrayList<>(); @@ -8287,7 +8286,7 @@ private void handleLineage(LoadTableDesc ltd, Operator output) } else if ( queryState.getCommandType().equals(HiveOperation.CREATETABLE_AS_SELECT.getOperationName())) { Path tlocation = null; - String tName = Utilities.getDbTableName(tableDesc.getDbTableName())[1]; + String tName = tableDesc.getTableName().getTable(); try { Warehouse wh = new Warehouse(conf); tlocation = wh.getDefaultTablePath(db.getDatabase(tableDesc.getDatabaseName()), @@ -11676,11 +11675,12 @@ private void setupStats(TableScanDesc tsDesc, QBParseInfo qbp, Table tab, String // Theoretically the key prefix could be any unique string shared // between TableScanOperator (when publishing) and StatsTask (when aggregating). // Here we use - // db_name.table_name + partitionSec + // cat.db_name.table_name + partitionSec // as the prefix for easy of read during explain and debugging. // Currently, partition spec can only be static partition. 
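A minimal sketch of the aggregation key format the comment above describes, mirroring the construction in the hunk that follows; the values "t", "hive" and "default" are assumed.

    // Catalog-qualified stats key: "hive.default.t/", to which the caller
    // appends the static partition spec, e.g. "hive.default.t/ds=2020/".
    String prefix = TableName.fromString("t", "hive", "default").toString() + Path.SEPARATOR;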
- String k = org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.encodeTableName(tblName) + Path.SEPARATOR; - tsDesc.setStatsAggPrefix(tab.getDbName()+"."+k); + String k = TableName.fromString(org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.encodeTableName(tblName), + tab.getCatalogName(), tab.getDbName()).toString() + Path.SEPARATOR; + tsDesc.setStatsAggPrefix(k); // set up WriteEntity for replication and txn stats WriteEntity we = new WriteEntity(tab, WriteEntity.WriteType.DDL_SHARED); @@ -12919,7 +12919,8 @@ protected void saveViewDefinition() throws SemanticException { sb.append(" FROM ("); sb.append(expandedText); sb.append(") "); - sb.append(HiveUtils.unparseIdentifier(Utilities.getDbTableName(createVwDesc.getViewName())[1], conf)); + final String viewName = HiveTableName.of(createVwDesc.getViewName()).getTable(); + sb.append(HiveUtils.unparseIdentifier(viewName, conf)); expandedText = sb.toString(); } } else { @@ -12953,7 +12954,8 @@ protected void saveViewDefinition() throws SemanticException { sb.append(" FROM ("); sb.append(expandedText); sb.append(") "); - sb.append(HiveUtils.unparseIdentifier(Utilities.getDbTableName(createVwDesc.getViewName())[1], conf)); + final String viewName = HiveTableName.of(createVwDesc.getViewName()).getTable(); + sb.append(HiveUtils.unparseIdentifier(viewName, conf)); expandedText = sb.toString(); } @@ -13467,7 +13469,6 @@ private boolean hasConstraints(final List partCols, final List cols = new ArrayList(); @@ -13505,7 +13506,7 @@ ASTNode analyzeCreateTable( RowFormatParams rowFormatParams = new RowFormatParams(); StorageFormat storageFormat = new StorageFormat(conf); - LOG.info("Creating table " + dbDotTab + " position=" + ast.getCharPositionInLine()); + LOG.info("Creating table " + qualifiedTabName + " position=" + ast.getCharPositionInLine()); int numCh = ast.getChildCount(); /* @@ -13703,10 +13704,9 @@ ASTNode analyzeCreateTable( throw new SemanticException( "Partition columns can only declared using their name and types in regular CREATE TABLE statements"); } - tblProps = validateAndAddDefaultProperties( - tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional); - addDbAndTabToOutputs(new String[] {qualifiedTabName.getDb(), qualifiedTabName.getTable()}, - TableType.MANAGED_TABLE, isTemporary, tblProps); + tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, qualifiedTabName.toString(), sortCols, + isMaterialization, isTemporary, isTransactional); + addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps); CreateTableDesc crtTblDesc = new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols, partCols, @@ -13729,12 +13729,11 @@ ASTNode analyzeCreateTable( case ctt: // CREATE TRANSACTIONAL TABLE if (isExt) { throw new SemanticException( - qualifiedTabName.getTable() + " cannot be declared transactional because it's an external table"); + qualifiedTabName.getNotEmptyDbTable() + " cannot be declared transactional because it's an external table"); } - tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, - isTemporary, isTransactional); - addDbAndTabToOutputs(new String[] {qualifiedTabName.getDb(), qualifiedTabName.getTable()}, - TableType.MANAGED_TABLE, false, tblProps); + tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, qualifiedTabName.toString(), sortCols, + isMaterialization, isTemporary, isTransactional); + addDbAndTabToOutputs(qualifiedTabName, 
TableType.MANAGED_TABLE, false, tblProps); CreateTableDesc crtTranTblDesc = new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols, partCols, bucketCols, sortCols, numBuckets, @@ -13755,9 +13754,8 @@ ASTNode analyzeCreateTable( case CTLT: // create table like tblProps = validateAndAddDefaultProperties( - tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional); - addDbAndTabToOutputs(new String[] {qualifiedTabName.getDb(), qualifiedTabName.getTable()}, - TableType.MANAGED_TABLE, isTemporary, tblProps); + tblProps, isExt, storageFormat, qualifiedTabName.toString(), sortCols, isMaterialization, isTemporary, isTransactional); + addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps); Table likeTable = getTable(likeTableName, false); if (likeTable != null) { @@ -13769,7 +13767,7 @@ ASTNode analyzeCreateTable( updateDefaultTblProps(likeTable.getParameters(), tblProps, null); } } - CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(dbDotTab, isExt, isTemporary, + CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(qualifiedTabName.toString(), isExt, isTemporary, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getSerdeProps(), tblProps, ifNotExists, likeTableName, isUserStorageFormat); @@ -13796,9 +13794,9 @@ ASTNode analyzeCreateTable( // Verify that the table does not already exist // dumpTable is only used to check the conflict for non-temporary tables try { - Table dumpTable = db.newTable(dbDotTab); + Table dumpTable = new Table(qualifiedTabName); if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false) && !ctx.isExplainSkipExecution()) { - throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(dbDotTab)); + throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(qualifiedTabName.toString())); } } catch (HiveException e) { throw new SemanticException(e); @@ -13840,9 +13838,8 @@ ASTNode analyzeCreateTable( } tblProps = validateAndAddDefaultProperties( - tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional); - addDbAndTabToOutputs(new String[] {qualifiedTabName.getDb(), qualifiedTabName.getTable()}, - TableType.MANAGED_TABLE, isTemporary, tblProps); + tblProps, isExt, storageFormat, qualifiedTabName.toString(), sortCols, isMaterialization, isTemporary, isTransactional); + addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps); tableDesc = new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols, partColNames, bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim, rowFormatParams.fieldEscape, rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, @@ -13865,12 +13862,12 @@ ASTNode analyzeCreateTable( } /** Adds entities for create table/create view. 
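A minimal sketch of the Table(TableName) constructor this patch adds to Table.java, which the dumpTable checks above now rely on; unlike the old Table(String, String) form it carries the catalog over. All names below are assumed.

    TableName qualified = TableName.fromString("t", "hive", "default");
    Table t = new Table(qualified);  // dbName "default", tableName "t", catName "hive"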
*/ - private void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type, + private void addDbAndTabToOutputs(TableName tableName, TableType type, boolean isTemporary, Map tblProps) throws SemanticException { - Database database = getDatabase(qualifiedTabName[0]); + Database database = getDatabase(tableName.getDb()); outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED)); - Table t = new Table(qualifiedTabName[0], qualifiedTabName[1]); + Table t = new Table(tableName); t.setParameters(tblProps); t.setTableType(type); t.setTemporary(isTemporary); @@ -13879,7 +13876,6 @@ private void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type, protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCtx) throws SemanticException { TableName qualTabName = getQualifiedTableName((ASTNode) ast.getChild(0)); - final String dbDotTable = qualTabName.getNotEmptyDbTable(); List cols = null; boolean ifNotExists = false; boolean rewriteEnabled = true; @@ -13897,7 +13893,7 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt RowFormatParams rowFormatParams = new RowFormatParams(); StorageFormat storageFormat = new StorageFormat(conf); - LOG.info("Creating view " + dbDotTable + " position=" + LOG.info("Creating view " + qualTabName + " position=" + ast.getCharPositionInLine()); int numCh = ast.getChildCount(); for (int num = 1; num < numCh; num++) { @@ -13979,9 +13975,9 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt // Verify that the table does not already exist // dumpTable is only used to check the conflict for non-temporary tables try { - Table dumpTable = db.newTable(dbDotTable); + Table dumpTable = new Table(qualTabName); if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false) && !ctx.isExplainSkipExecution()) { - throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(dbDotTable)); + throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(qualTabName.toString())); } } catch (HiveException e) { throw new SemanticException(e); @@ -14022,26 +14018,24 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt if (tblProps == null) { tblProps = new HashMap<>(); } - tblProps = convertToAcidByDefault(storageFormat, dbDotTable, null, tblProps); + tblProps = convertToAcidByDefault(storageFormat, qualTabName.toString(), null, tblProps); } createVwDesc = new CreateViewDesc( - dbDotTable, cols, comment, tblProps, partColNames, sortColNames, distributeColNames, + qualTabName.toString(), cols, comment, tblProps, partColNames, sortColNames, distributeColNames, ifNotExists, isRebuild, rewriteEnabled, isAlterViewAs, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getStorageHandler(), storageFormat.getSerdeProps()); - addDbAndTabToOutputs(new String[] {qualTabName.getDb(), qualTabName.getTable()}, TableType.MATERIALIZED_VIEW, - false, tblProps); + addDbAndTabToOutputs(qualTabName, TableType.MATERIALIZED_VIEW, false, tblProps); queryState.setCommandType(HiveOperation.CREATE_MATERIALIZED_VIEW); } else { createVwDesc = new CreateViewDesc( - dbDotTable, cols, comment, tblProps, partColNames, + qualTabName.toString(), cols, comment, tblProps, partColNames, ifNotExists, orReplace, isAlterViewAs, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), storageFormat.getSerde()); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), createVwDesc))); - 
addDbAndTabToOutputs(new String[] {qualTabName.getDb(), qualTabName.getTable()}, - TableType.VIRTUAL_VIEW, false, tblProps); + addDbAndTabToOutputs(qualTabName, TableType.VIRTUAL_VIEW, false, tblProps); queryState.setCommandType(HiveOperation.CREATEVIEW); } qb.setViewDesc(createVwDesc); @@ -15461,7 +15455,7 @@ protected String getFullTableNameForSQL(ASTNode n) throws SemanticException { switch (n.getType()) { case HiveParser.TOK_TABNAME: TableName tableName = getQualifiedTableName(n); - return HiveTableName.ofNullable(HiveUtils.unparseIdentifier(tableName.getTable(), this.conf), + return TableName.fromString(HiveUtils.unparseIdentifier(tableName.getTable(), this.conf), HiveUtils.unparseIdentifier(tableName.getDb(), this.conf)).getNotEmptyDbTable(); case HiveParser.TOK_TABREF: return getFullTableNameForSQL((ASTNode) n.getChild(0)); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java index 2f3fc6c50a..e4d88927d6 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java @@ -26,6 +26,7 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.HiveStatsUtils; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.MetaException; @@ -479,18 +480,19 @@ private Path getDefaultCtasLocation(final ParseContext pCtx) throws SemanticExce try { String protoName = null; boolean isExternal = false; + TableName tableName = null; if (pCtx.getQueryProperties().isCTAS()) { - protoName = pCtx.getCreateTable().getDbTableName(); + tableName = pCtx.getCreateTable().getTableName(); isExternal = pCtx.getCreateTable().isExternal(); } else if (pCtx.getQueryProperties().isMaterializedView()) { protoName = pCtx.getCreateViewDesc().getViewName(); + tableName = HiveTableName.of(protoName); } - String[] names = Utilities.getDbTableName(protoName); - if (!db.databaseExists(names[0])) { - throw new SemanticException("ERROR: The database " + names[0] + " does not exist."); + if (!db.databaseExists(tableName.getDb())) { + throw new SemanticException("ERROR: The database " + tableName.getDb() + " does not exist."); } Warehouse wh = new Warehouse(conf); - return wh.getDefaultTablePath(db.getDatabase(names[0]), names[1], isExternal); + return wh.getDefaultTablePath(db.getDatabase(tableName.getDb()), tableName.getTable(), isExternal); } catch (HiveException e) { throw new SemanticException(e); } catch (MetaException e) { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java index b11afe80a1..080f383475 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java @@ -168,7 +168,7 @@ private void writeMetaData(PartitionIterable partitions) throws SemanticExceptio if (tableSpec.tableHandle.isPartitioned()) { if (partitions == null) { throw new IllegalStateException("partitions cannot be null for partitionTable :" - + tableSpec.getTableName().getTable()); + + tableSpec.getTableName().getNotEmptyDbTable()); } managedTableCopyPaths = new PartitionExport( paths, partitions, distCpDoAsUser, conf, mmCtx).write(replicationSpec, isExportTask); @@ -386,7 +386,7 @@ public AuthEntities getAuthEntities() 
throws SemanticException { if (tableSpec.tableHandle.isPartitioned()) { if (partitions == null) { throw new IllegalStateException("partitions cannot be null for partitionTable :" - + tableSpec.getTableName().getTable()); + + tableSpec.getTableName().getNotEmptyDbTable()); } for (Partition partition : partitions) { authEntities.inputs.add(new ReadEntity(partition)); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java index 6f98373c9e..2602b231c6 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java @@ -56,7 +56,7 @@ final String actualDbName = context.isDbNameEmpty() ? fks.get(0).getFktable_db() : context.dbName; final String actualTblName = fks.get(0).getFktable_name(); - final TableName tName = TableName.fromString(actualTblName, null, actualDbName); + final TableName tName = TableName.fromString(actualTblName, actualDbName); for (SQLForeignKey fk : fks) { // If parent table is in the same database, change it to the actual db on destination diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java index 995c5d2f84..01458cd207 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java @@ -56,7 +56,7 @@ final String actualDbName = context.isDbNameEmpty() ? nns.get(0).getTable_db() : context.dbName; final String actualTblName = nns.get(0).getTable_name(); - final TableName tName = TableName.fromString(actualTblName, null, actualDbName); + final TableName tName = TableName.fromString(actualTblName, actualDbName); for (SQLNotNullConstraint nn : nns) { nn.setTable_db(actualDbName); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java index f6decc27fc..d057e7ba21 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java @@ -56,7 +56,7 @@ final String actualDbName = context.isDbNameEmpty() ? pks.get(0).getTable_db() : context.dbName; final String actualTblName = pks.get(0).getTable_name(); - final TableName tName = TableName.fromString(actualTblName, null, actualDbName); + final TableName tName = TableName.fromString(actualTblName, actualDbName); for (SQLPrimaryKey pk : pks) { pk.setTable_db(actualDbName); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java index e1c1d3a180..3d5ffdb55e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java @@ -56,7 +56,7 @@ final String actualDbName = context.isDbNameEmpty() ? 
uks.get(0).getTable_db() : context.dbName; final String actualTblName = uks.get(0).getTable_name(); - final TableName tName = TableName.fromString(actualTblName, null, actualDbName); + final TableName tName = TableName.fromString(actualTblName, actualDbName); for (SQLUniqueConstraint uk : uks) { uk.setTable_db(actualDbName); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java index 34d3b00500..ec2aa6938a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java @@ -23,10 +23,8 @@ import org.apache.hadoop.hive.ql.ddl.table.constraint.drop.AlterTableDropConstraintDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; -import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.parse.SemanticException; -import java.io.Serializable; import java.util.Collections; import java.util.List; @@ -37,7 +35,7 @@ DropConstraintMessage msg = deserializer.getDropConstraintMessage(context.dmd.getPayload()); final String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName; final String actualTblName = msg.getTable(); - final TableName tName = HiveTableName.ofNullable(actualTblName, actualDbName); + final TableName tName = TableName.fromString(actualTblName, actualDbName); String constraintName = msg.getConstraint(); AlterTableDropConstraintDesc dropConstraintsDesc = diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java index 066549d9cd..10630ea61a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hive.ql.parse.repl.load.message; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.messaging.DropPartitionMessage; import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.partition.drop.AlterTableDropPartitionDesc; @@ -24,11 +25,9 @@ import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; -import java.io.Serializable; import java.util.Collections; import java.util.List; import java.util.Map; @@ -45,7 +44,7 @@ ReplUtils.genPartSpecs(new Table(msg.getTableObj()), msg.getPartitions()); if (partSpecs.size() > 0) { AlterTableDropPartitionDesc dropPtnDesc = - new AlterTableDropPartitionDesc(HiveTableName.ofNullable(actualTblName, actualDbName), partSpecs, true, + new AlterTableDropPartitionDesc(TableName.fromString(actualTblName, actualDbName), partSpecs, true, context.eventOnlyReplicationSpec()); Task dropPtnTask = TaskFactory.get( new DDLWork(readEntitySet, writeEntitySet, dropPtnDesc), context.hiveConf diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java index 
7a4cb93c12..7cf396fa37 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java @@ -55,14 +55,14 @@ } } - TableName oldName = TableName.fromString(tableObjBefore.getTableName(), null, oldDbName); - TableName newName = TableName.fromString(tableObjAfter.getTableName(), null, newDbName); + TableName oldName = TableName.fromString(tableObjBefore.getTableName(), oldDbName); + TableName newName = TableName.fromString(tableObjAfter.getTableName(), newDbName); ReplicationSpec replicationSpec = context.eventOnlyReplicationSpec(); if (ReplUtils.isTableMigratingToTransactional(context.hiveConf, tableObjAfter)) { replicationSpec.setMigratingToTxnTable(); } AlterTableRenameDesc renameTableDesc = - new AlterTableRenameDesc(oldName, replicationSpec, false, newName.getNotEmptyDbTable()); + new AlterTableRenameDesc(oldName, replicationSpec, false, newName); renameTableDesc.setWriteId(msg.getWriteId()); Task renameTableTask = TaskFactory.get( new DDLWork(readEntitySet, writeEntitySet, renameTableDesc), context.hiveConf); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java index 6c3a7ebb0e..5c40fde92f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java @@ -37,7 +37,7 @@ @Override public List> handle(Context context) throws SemanticException { AlterPartitionMessage msg = deserializer.getAlterPartitionMessage(context.dmd.getPayload()); - final TableName tName = TableName.fromString(msg.getTable(), null, + final TableName tName = TableName.fromString(msg.getTable(), context.isDbNameEmpty() ? msg.getDB() : context.dbName); Map partSpec = new LinkedHashMap<>(); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java index 2b12be4c46..ce45eb8c6f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java @@ -33,7 +33,7 @@ @Override public List> handle(Context context) throws SemanticException { AlterTableMessage msg = deserializer.getAlterTableMessage(context.dmd.getPayload()); - final TableName tName = TableName.fromString(msg.getTable(), null, + final TableName tName = TableName.fromString(msg.getTable(), context.isDbNameEmpty() ? 
msg.getDB() : context.dbName); TruncateTableDesc truncateTableDesc = new TruncateTableDesc(tName, null, context.eventOnlyReplicationSpec()); diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java index 20f7d2e0e4..40bff112c1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java @@ -197,7 +197,7 @@ public String getTableName() { } else if (work.getTableSpecs() != null) { return work.getTableSpecs().getTableName().getTable(); } else if (getLoadFileDesc().getCtasCreateTableDesc() != null) { - return getLoadFileDesc().getCtasCreateTableDesc().getDbTableName(); + return getLoadFileDesc().getCtasCreateTableDesc().getTableName().toString(); } else { return getLoadFileDesc().getCreateViewDesc().getViewName(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java index c90ea437f5..c79f4971da 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java @@ -21,6 +21,7 @@ import java.io.Serializable; import java.util.Map; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -39,8 +40,7 @@ private static final long serialVersionUID = 1L; private final String partName; private final Map mapProp; - private final String dbName; - private final String tableName; + private final TableName tableName; private final String colName; private final String colType; private final ColumnStatistics colStats; @@ -50,13 +50,11 @@ public ColumnStatsUpdateWork(String partName, Map mapProp, - String dbName, - String tableName, + TableName tableName, String colName, String colType) { this.partName = partName; this.mapProp = mapProp; - this.dbName = dbName; this.tableName = tableName; this.colName = colName; this.colType = colType; @@ -69,8 +67,7 @@ public ColumnStatsUpdateWork(ColumnStatistics colStats, boolean isMigratingToTxn this.isMigratingToTxn = isMigratingToTxn; this.partName = null; this.mapProp = null; - this.dbName = null; - this.tableName = null; + this.tableName = null; // FIXME: This won't do this.colName = null; this.colType = null; } @@ -88,11 +85,7 @@ public String getPartName() { return mapProp; } - public String dbName() { - return dbName; - } - - public String getTableName() { + public TableName getTableName() { return tableName; } @@ -117,7 +110,7 @@ public void setWriteId(long writeId) { @Override public String getFullTableName() { - return dbName + "." 
+ tableName; + return tableName.getNotEmptyDbTable(); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java index 41fbe2eabf..3b61026bf5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java @@ -52,7 +52,7 @@ public ImportTableDesc(String dbName, Table table) throws Exception { } this.dbName = dbName; - TableName tableName = HiveTableName.ofNullable(table.getTableName(), dbName); + final TableName tableName = TableName.fromString(table.getTableName(), dbName); this.createTblDesc = new CreateTableDesc(tableName, false, // isExternal: set to false here, can be overwritten by the IMPORT stmt diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java index 980f39b681..d99c0fa4b9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java @@ -33,6 +33,7 @@ import java.util.Properties; import java.util.Set; +import org.apache.commons.lang3.ObjectUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.common.StatsSetupConst; @@ -370,9 +371,13 @@ public static TableDesc getTableDesc(CreateTableDesc crtTblDesc, String cols, crtTblDesc.getNullFormat()); } - if (crtTblDesc.getDbTableName() != null && crtTblDesc.getDatabaseName() != null) { + if (crtTblDesc.getTableName() != null && crtTblDesc.getDatabaseName() != null) { + // FIXME: This is now deprecated, should be handled more gracefully properties.setProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME, - crtTblDesc.getDbTableName()); + crtTblDesc.getTableName().getNotEmptyDbTable()); + // currently there are cases when the catalog can be null in the TableName object + properties.setProperty(TableDesc.META_TABLE_CAT_NAME, + ObjectUtils.firstNonNull(crtTblDesc.getTableName().getCat(), SessionState.get().getCurrentCatalog())); } if (crtTblDesc.getTblProps() != null) { diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java index 7993779562..9061ddd6d2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java @@ -53,6 +53,7 @@ private java.util.Properties properties; private Map jobProperties; private Map jobSecrets; + public static final String META_TABLE_CAT_NAME = "cat"; public static final String SECRET_PREFIX = "TABLE_SECRET"; public static final String SECRET_DELIMIT = "#"; @@ -171,6 +172,10 @@ public String getDbName() { return properties.getProperty(hive_metastoreConstants.META_TABLE_DB); } + public String getCatName() { + return properties.getProperty(META_TABLE_CAT_NAME); + } + @Explain(displayName = "input format") public String getInputFileFormatClassName() { return getInputFileFormatClass().getName(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java index 537b9de5db..4fc9249d10 100644 --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java @@ -20,11 +20,13 @@
import java.util.ArrayList; import java.util.List; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc; import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege; @@ -64,7 +66,12 @@ public HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjec dbTable = new String[] {null, null}; } else { if (privSubjectDesc.getTable()) { - dbTable = Utilities.getDbTableName(privSubjectDesc.getObject()); + if (privSubjectDesc.getObject() == null) { + dbTable = new String[] {null, null}; + } else { + final TableName tn = HiveTableName.of(privSubjectDesc.getObject()); + dbTable = new String[] {tn.getDb(), tn.getTable()}; + } } else { dbTable = new String[] {privSubjectDesc.getObject(), null}; } diff --git ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsNoJobTask.java ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsNoJobTask.java index 53b3065a88..8bd9280a2b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsNoJobTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsNoJobTask.java @@ -31,6 +31,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.HiveStatsUtils; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.EnvironmentContext; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; @@ -46,6 +47,7 @@ import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec; +import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.plan.BasicStatsNoJobWork; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; @@ -314,7 +316,7 @@ private int aggregateStats(ExecutorService threadPool, Hive db) { private int updatePartitions(Hive db, List scs, Table table) throws InvalidOperationException, HiveException { - String tableFullName = table.getFullyQualifiedName(); + TableName tableName = HiveTableName.of(table); if (scs.isEmpty()) { return 0; @@ -342,13 +344,13 @@ private int updatePartitions(Hive db, List scs, Table table LOG.debug("Collectors.size(): {}", collectorsByTable.keySet()); if (collectorsByTable.keySet().size() < 1) { - LOG.warn("Collectors are empty! ; {}", tableFullName); + LOG.warn("Collectors are empty! ; {}", tableName); } // for now this should be true... 
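A minimal sketch of the call shape used below, assuming the surrounding task's db, table, updatedTable, updatedParts and environmentContext objects: the stats updaters now pass a TableName instead of a pre-joined "db.table" string, so the catalog survives into the metastore call.

    TableName tn = HiveTableName.of(table);                          // of(Table) overload
    db.alterTable(tn, updatedTable, environmentContext, true);       // transactional = true
    db.alterPartitions(tn, updatedParts, environmentContext, true);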
assert (collectorsByTable.keySet().size() <= 1); - LOG.debug("Updating stats for: {}", tableFullName); + LOG.debug("Updating stats for: {}", tableName); for (String partName : collectorsByTable.keySet()) { ImmutableList values = collectorsByTable.get(partName); @@ -358,19 +360,19 @@ private int updatePartitions(Hive db, List scs, Table table } if (values.get(0).result instanceof Table) { - db.alterTable(tableFullName, (Table) values.get(0).result, environmentContext, true); - LOG.debug("Updated stats for {}.", tableFullName); + db.alterTable(tableName, (Table) values.get(0).result, environmentContext, true); + LOG.debug("Updated stats for {}.", tableName); } else { if (values.get(0).result instanceof Partition) { List results = Lists.transform(values, FooterStatCollector.EXTRACT_RESULT_FUNCTION); - db.alterPartitions(tableFullName, results, environmentContext, true); - LOG.debug("Bulk updated {} partitions of {}.", results.size(), tableFullName); + db.alterPartitions(tableName, results, environmentContext, true); + LOG.debug("Bulk updated {} partitions of {}.", results.size(), tableName); } else { throw new RuntimeException("inconsistent"); } } } - LOG.debug("Updated stats for: {}", tableFullName); + LOG.debug("Updated stats for: {}", tableName); return 0; } diff --git ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsTask.java ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsTask.java index 6eb1ca2645..d781b5c4ed 100644 --- ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsTask.java @@ -33,6 +33,7 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.Warehouse; @@ -50,6 +51,7 @@ import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec; +import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.plan.BasicStatsWork; import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; import org.apache.hadoop.hive.ql.plan.LoadTableDesc; @@ -192,11 +194,12 @@ private String getAggregationPrefix(Table table, Partition partition) throws Met private String getAggregationPrefix0(Table table, Partition partition) throws MetaException { - // prefix is of the form dbName.tblName - String prefix = table.getDbName() + "." + MetaStoreUtils.encodeTableName(table.getTableName()); + // prefix is of the form catalog.dbName.tblName + String prefix = TableName + .fromString(MetaStoreUtils.encodeTableName(table.getTableName()), table.getCatalogName(), table.getDbName()) + .toString(); // FIXME: this is a secret contract; reusein getAggrKey() creates a more closer relation to the StatsGatherer // prefix = work.getAggKey(); - prefix = prefix.toLowerCase(); if (partition != null) { return Utilities.join(prefix, Warehouse.makePartPath(partition.getSpec())); } @@ -250,7 +253,7 @@ private int aggregateStats(Hive db) { List partitions = getPartitionsList(db); - String tableFullName = table.getDbName() + "." 
+ table.getTableName(); + TableName tableName = HiveTableName.of(table); List partishes = new ArrayList<>(); @@ -264,12 +267,12 @@ private int aggregateStats(Hive db) { if (res == null) { return 0; } - db.alterTable(tableFullName, res, environmentContext, true); + db.alterTable(tableName, res, environmentContext, true); if (conf.getBoolVar(ConfVars.TEZ_EXEC_SUMMARY)) { - console.printInfo("Table " + tableFullName + " stats: [" + toString(p.getPartParameters()) + ']'); + console.printInfo("Table " + tableName + " stats: [" + toString(p.getPartParameters()) + ']'); } - LOG.info("Table " + tableFullName + " stats: [" + toString(p.getPartParameters()) + ']'); + LOG.info("Table " + tableName + " stats: [" + toString(p.getPartParameters()) + ']'); } else { // Partitioned table: @@ -332,7 +335,7 @@ public Void call() throws Exception { } if (!updates.isEmpty()) { - db.alterPartitions(tableFullName, updates, environmentContext, true); + db.alterPartitions(tableName, updates, environmentContext, true); } if (work.isStatsReliable() && updates.size() != processors.size()) { LOG.info("Stats should be reliadble...however seems like there were some issue.. => ret 1"); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java index b6a6bab6cb..3362769ec4 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java @@ -125,35 +125,6 @@ public void testSerializeTimestamp() { SerializationUtilities.serializeExpression(desc)).getExprString()); } - @Test - public void testgetDbTableName() throws HiveException{ - String tablename; - String [] dbtab; - SessionState.start(new HiveConf(this.getClass())); - String curDefaultdb = SessionState.get().getCurrentDatabase(); - - //test table without db portion - tablename = "tab1"; - dbtab = Utilities.getDbTableName(tablename); - assertEquals("db name", curDefaultdb, dbtab[0]); - assertEquals("table name", tablename, dbtab[1]); - - //test table with db portion - tablename = "dab1.tab1"; - dbtab = Utilities.getDbTableName(tablename); - assertEquals("db name", "dab1", dbtab[0]); - assertEquals("table name", "tab1", dbtab[1]); - - //test invalid table name - tablename = "dab1.tab1.x1"; - try { - dbtab = Utilities.getDbTableName(tablename); - fail("exception was expected for invalid table name"); - } catch(HiveException ex){ - assertEquals("Invalid table name " + tablename, ex.getMessage()); - } - } - @Test public void testReplaceTaskId() { String taskID = "000000"; diff --git ql/src/test/results/clientnegative/alter_view_as_select_not_exist.q.out ql/src/test/results/clientnegative/alter_view_as_select_not_exist.q.out index 5ec4563b3d..bda8be4107 100644 --- ql/src/test/results/clientnegative/alter_view_as_select_not_exist.q.out +++ ql/src/test/results/clientnegative/alter_view_as_select_not_exist.q.out @@ -3,4 +3,4 @@ PREHOOK: type: DROPVIEW POSTHOOK: query: DROP VIEW testView POSTHOOK: type: DROPVIEW FAILED: SemanticException [Error 10216]: Cannot ALTER VIEW AS SELECT if view currently does not exist - The following view does not exist: default.testView + The following view does not exist: hive.default.testview diff --git ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out index 5261a3bd15..4d8de3e53f 100644 --- ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out +++ 
ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out @@ -6,7 +6,7 @@ WHERE key=86 PREHOOK: type: CREATEVIEW PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@testViewPart +PREHOOK: Output: default@testviewpart POSTHOOK: query: CREATE VIEW testViewPart PARTITIONED ON (value) AS SELECT key, value @@ -15,8 +15,8 @@ WHERE key=86 POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@testViewPart -POSTHOOK: Lineage: testViewPart.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Output: default@testviewpart +POSTHOOK: Lineage: testviewpart.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: ALTER VIEW testViewPart ADD PARTITION (value='val_86') PARTITION (value='val_xyz') PREHOOK: type: ALTERTABLE_ADDPARTS @@ -76,4 +76,4 @@ Expanded Query: SELECT `src`.`key`, `src`.`value` FROM `default`.`src` WHERE `src`.`key`=86 FAILED: SemanticException [Error 10217]: Cannot replace a view with CREATE VIEW or REPLACE VIEW or ALTER VIEW AS SELECT if the view has partitions - The following view has partition, it could not be replaced: default.testViewPart + The following view has partition, it could not be replaced: hive.default.testviewpart diff --git ql/src/test/results/clientnegative/authorization_explain.q.out ql/src/test/results/clientnegative/authorization_explain.q.out index 1ba3a6cd1a..df4eb9d3e2 100644 --- ql/src/test/results/clientnegative/authorization_explain.q.out +++ ql/src/test/results/clientnegative/authorization_explain.q.out @@ -13,11 +13,11 @@ STAGE PLANS: Stage: Stage-0 Create Table columns: key int, value string - name: default.authorization_explain if not exists: true input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.authorization_explain PREHOOK: query: create table if not exists authorization_explain (key int, value string) PREHOOK: type: CREATETABLE diff --git ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out index 3261f78fab..11992f7208 100644 --- ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out +++ ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out @@ -6,12 +6,12 @@ PREHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) row format delimited fields terminated by '|' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@Employee_Part +PREHOOK: Output: default@employee_part POSTHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string) row format delimited fields terminated by '|' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@Employee_Part +POSTHOOK: Output: default@employee_part PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA') PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values_autogather.q.out ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values_autogather.q.out index 
3261f78fab..11992f7208 100644 --- ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values_autogather.q.out +++ ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values_autogather.q.out @@ -6,12 +6,12 @@ PREHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) row format delimited fields terminated by '|' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@Employee_Part +PREHOOK: Output: default@employee_part POSTHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string) row format delimited fields terminated by '|' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@Employee_Part +POSTHOOK: Output: default@employee_part PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA') PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientnegative/columnstats_partlvl_multiple_part_clause.q.out ql/src/test/results/clientnegative/columnstats_partlvl_multiple_part_clause.q.out index 5252e6ca07..28aa6df902 100644 --- ql/src/test/results/clientnegative/columnstats_partlvl_multiple_part_clause.q.out +++ ql/src/test/results/clientnegative/columnstats_partlvl_multiple_part_clause.q.out @@ -6,12 +6,12 @@ PREHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) row format delimited fields terminated by '|' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@Employee_Part +PREHOOK: Output: default@employee_part POSTHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string) row format delimited fields terminated by '|' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@Employee_Part +POSTHOOK: Output: default@employee_part PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA') PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientnegative/columnstats_tbllvl.q.out ql/src/test/results/clientnegative/columnstats_tbllvl.q.out index 5aa4ae6404..b0e8e99d96 100644 --- ql/src/test/results/clientnegative/columnstats_tbllvl.q.out +++ ql/src/test/results/clientnegative/columnstats_tbllvl.q.out @@ -15,7 +15,7 @@ PREHOOK: query: CREATE TABLE UserVisits_web_text_none ( row format delimited fields terminated by '|' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@UserVisits_web_text_none +PREHOOK: Output: default@uservisits_web_text_none POSTHOOK: query: CREATE TABLE UserVisits_web_text_none ( sourceIP string, destURL string, @@ -29,7 +29,7 @@ POSTHOOK: query: CREATE TABLE UserVisits_web_text_none ( row format delimited fields terminated by '|' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@UserVisits_web_text_none +POSTHOOK: Output: default@uservisits_web_text_none PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out 
ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out index 5aa4ae6404..b0e8e99d96 100644 --- ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out +++ ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out @@ -15,7 +15,7 @@ PREHOOK: query: CREATE TABLE UserVisits_web_text_none ( row format delimited fields terminated by '|' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@UserVisits_web_text_none +PREHOOK: Output: default@uservisits_web_text_none POSTHOOK: query: CREATE TABLE UserVisits_web_text_none ( sourceIP string, destURL string, @@ -29,7 +29,7 @@ POSTHOOK: query: CREATE TABLE UserVisits_web_text_none ( row format delimited fields terminated by '|' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@UserVisits_web_text_none +POSTHOOK: Output: default@uservisits_web_text_none PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientnegative/create_external_transactional.q.out ql/src/test/results/clientnegative/create_external_transactional.q.out index 6a0f5c14bf..dcd42096d2 100644 --- ql/src/test/results/clientnegative/create_external_transactional.q.out +++ ql/src/test/results/clientnegative/create_external_transactional.q.out @@ -1 +1 @@ -FAILED: SemanticException transactional_external cannot be declared transactional because it's an external table +FAILED: SemanticException default.transactional_external cannot be declared transactional because it's an external table diff --git ql/src/test/results/clientnegative/create_or_replace_view1.q.out ql/src/test/results/clientnegative/create_or_replace_view1.q.out index 4610ff3333..6a27ce2eca 100644 --- ql/src/test/results/clientnegative/create_or_replace_view1.q.out +++ ql/src/test/results/clientnegative/create_or_replace_view1.q.out @@ -26,4 +26,4 @@ POSTHOOK: Input: default@v POSTHOOK: Output: default@v POSTHOOK: Output: default@v@ds=1/hr=2 FAILED: SemanticException [Error 10217]: Cannot replace a view with CREATE VIEW or REPLACE VIEW or ALTER VIEW AS SELECT if the view has partitions - The following view has partition, it could not be replaced: default.v + The following view has partition, it could not be replaced: hive.default.v diff --git ql/src/test/results/clientnegative/create_or_replace_view2.q.out ql/src/test/results/clientnegative/create_or_replace_view2.q.out index 4610ff3333..6a27ce2eca 100644 --- ql/src/test/results/clientnegative/create_or_replace_view2.q.out +++ ql/src/test/results/clientnegative/create_or_replace_view2.q.out @@ -26,4 +26,4 @@ POSTHOOK: Input: default@v POSTHOOK: Output: default@v POSTHOOK: Output: default@v@ds=1/hr=2 FAILED: SemanticException [Error 10217]: Cannot replace a view with CREATE VIEW or REPLACE VIEW or ALTER VIEW AS SELECT if the view has partitions - The following view has partition, it could not be replaced: default.v + The following view has partition, it could not be replaced: hive.default.v diff --git ql/src/test/results/clientnegative/create_or_replace_view3.q.out ql/src/test/results/clientnegative/create_or_replace_view3.q.out index 114691e501..e8260db87e 100644 --- ql/src/test/results/clientnegative/create_or_replace_view3.q.out +++ ql/src/test/results/clientnegative/create_or_replace_view3.q.out @@ -1,2 +1,2 @@ FAILED: SemanticException [Error 10218]: Existing 
table is not a view - The following is an existing table, not a view: default.src + The following is an existing table, not a view: hive.default.src diff --git ql/src/test/results/clientnegative/create_view_failure1.q.out ql/src/test/results/clientnegative/create_view_failure1.q.out index b960a5f5a2..3e20e16412 100644 --- ql/src/test/results/clientnegative/create_view_failure1.q.out +++ ql/src/test/results/clientnegative/create_view_failure1.q.out @@ -15,4 +15,4 @@ PREHOOK: type: CREATEVIEW PREHOOK: Input: default@src PREHOOK: Output: database:default PREHOOK: Output: default@xxx12 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Table already exists: default.xxx12 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Table already exists: hive.default.xxx12 diff --git ql/src/test/results/clientnegative/decimal_precision.q.out ql/src/test/results/clientnegative/decimal_precision.q.out index baadfdc3a7..ea0e256d29 100644 --- ql/src/test/results/clientnegative/decimal_precision.q.out +++ ql/src/test/results/clientnegative/decimal_precision.q.out @@ -8,12 +8,12 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_PRECISION +PREHOOK: Output: default@decimal_precision POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION(`dec` decimal) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_PRECISION +POSTHOOK: Output: default@decimal_precision FAILED: SemanticException [Error 10029]: Line 3:15 Invalid numerical constant '123456789012345678901234567890.123456789bd' diff --git ql/src/test/results/clientnegative/decimal_precision_1.q.out ql/src/test/results/clientnegative/decimal_precision_1.q.out index 569f9cdae5..b87ffe5819 100644 --- ql/src/test/results/clientnegative/decimal_precision_1.q.out +++ ql/src/test/results/clientnegative/decimal_precision_1.q.out @@ -8,12 +8,12 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_PRECISION +PREHOOK: Output: default@decimal_precision POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION(`dec` decimal) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_PRECISION +POSTHOOK: Output: default@decimal_precision FAILED: SemanticException [Error 10029]: Line 3:46 Invalid numerical constant '1234567890123456789.0123456789bd' diff --git ql/src/test/results/clientnegative/delete_non_acid_table.q.out ql/src/test/results/clientnegative/delete_non_acid_table.q.out index dafac6d7df..19fd5fb426 100644 --- ql/src/test/results/clientnegative/delete_non_acid_table.q.out +++ ql/src/test/results/clientnegative/delete_non_acid_table.q.out @@ -34,4 +34,4 @@ POSTHOOK: Input: default@not_an_acid_table2 -1070883071 0ruyd6Y50JpdGRf6HqD -1070551679 iUR3Q -1069736047 k17Am8uPHWk02cEf1jet -FAILED: SemanticException [Error 10297]: Attempt to do update or delete on table not_an_acid_table2 that is not transactional +FAILED: SemanticException [Error 10297]: Attempt to do update or delete on table default.not_an_acid_table2 that is not transactional diff --git ql/src/test/results/clientnegative/deletejar.q.out ql/src/test/results/clientnegative/deletejar.q.out index 2827196501..606738b10c 100644 --- ql/src/test/results/clientnegative/deletejar.q.out 
+++ ql/src/test/results/clientnegative/deletejar.q.out @@ -1,5 +1,5 @@ PREHOOK: query: CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DELETEJAR +PREHOOK: Output: default@deletejar FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Cannot validate serde: org.apache.hadoop.hive.serde2.TestSerDe diff --git ql/src/test/results/clientnegative/desc_failure1.q.out ql/src/test/results/clientnegative/desc_failure1.q.out index 29ed890c69..54a5d255f4 100644 --- ql/src/test/results/clientnegative/desc_failure1.q.out +++ ql/src/test/results/clientnegative/desc_failure1.q.out @@ -1 +1 @@ -FAILED: SemanticException [Error 10001]: Table not found default.NonExistentTable +FAILED: SemanticException [Error 10001]: Table not found default.nonexistenttable diff --git ql/src/test/results/clientnegative/drop_database_cascade.q.out ql/src/test/results/clientnegative/drop_database_cascade.q.out index 463dc7f035..0b576f63ba 100644 --- ql/src/test/results/clientnegative/drop_database_cascade.q.out +++ ql/src/test/results/clientnegative/drop_database_cascade.q.out @@ -12,12 +12,12 @@ POSTHOOK: type: SWITCHDATABASE POSTHOOK: Input: database:test_database PREHOOK: query: CREATE TABLE test_table (key STRING, value STRING) PREHOOK: type: CREATETABLE -PREHOOK: Output: TEST_database@test_table PREHOOK: Output: database:test_database +PREHOOK: Output: test_database@test_table POSTHOOK: query: CREATE TABLE test_table (key STRING, value STRING) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: TEST_database@test_table POSTHOOK: Output: database:test_database +POSTHOOK: Output: test_database@test_table PREHOOK: query: CREATE FUNCTION test_func as 'org.apache.hadoop.hive.ql.udf.UDFAscii' PREHOOK: type: CREATEFUNCTION PREHOOK: Output: database:test_database @@ -76,4 +76,4 @@ PREHOOK: type: DESCFUNCTION POSTHOOK: query: describe function TEST_database.test_func POSTHOOK: type: DESCFUNCTION Function 'TEST_database.test_func' does not exist. 
-FAILED: SemanticException [Error 10072]: Database does not exist: TEST_database +FAILED: SemanticException [Error 10072]: Database does not exist: test_database diff --git ql/src/test/results/clientnegative/drop_table_failure1.q.out ql/src/test/results/clientnegative/drop_table_failure1.q.out index b744332e6c..9e7069c7b5 100644 --- ql/src/test/results/clientnegative/drop_table_failure1.q.out +++ ql/src/test/results/clientnegative/drop_table_failure1.q.out @@ -1 +1 @@ -FAILED: SemanticException [Error 10001]: Table not found UnknownTable +FAILED: SemanticException [Error 10001]: Table not found unknowntable diff --git ql/src/test/results/clientnegative/drop_view_failure2.q.out ql/src/test/results/clientnegative/drop_view_failure2.q.out index 428a2b787d..48273c495b 100644 --- ql/src/test/results/clientnegative/drop_view_failure2.q.out +++ ql/src/test/results/clientnegative/drop_view_failure2.q.out @@ -1 +1 @@ -FAILED: SemanticException [Error 10001]: Table not found UnknownView +FAILED: SemanticException [Error 10001]: Table not found unknownview diff --git ql/src/test/results/clientnegative/groupby_grouping_id1.q.out ql/src/test/results/clientnegative/groupby_grouping_id1.q.out index 912b8d3dea..2587c77b55 100644 --- ql/src/test/results/clientnegative/groupby_grouping_id1.q.out +++ ql/src/test/results/clientnegative/groupby_grouping_id1.q.out @@ -1,9 +1,9 @@ PREHOOK: query: CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1 +PREHOOK: Output: default@t1 POSTHOOK: query: CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1 +POSTHOOK: Output: default@t1 FAILED: SemanticException [Error 10004]: Line 3:7 Invalid table alias or column reference 'GROUPING__ID': (possible column names are: key, val) diff --git ql/src/test/results/clientnegative/groupby_grouping_sets1.q.out ql/src/test/results/clientnegative/groupby_grouping_sets1.q.out index 12d061443e..ee72a3ac9b 100644 --- ql/src/test/results/clientnegative/groupby_grouping_sets1.q.out +++ ql/src/test/results/clientnegative/groupby_grouping_sets1.q.out @@ -1,9 +1,9 @@ PREHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1 +PREHOOK: Output: default@t1 POSTHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1 +POSTHOOK: Output: default@t1 FAILED: SemanticException [Error 10211]: Empty grouping sets not allowed diff --git ql/src/test/results/clientnegative/groupby_grouping_sets2.q.out ql/src/test/results/clientnegative/groupby_grouping_sets2.q.out index 12d061443e..ee72a3ac9b 100644 --- ql/src/test/results/clientnegative/groupby_grouping_sets2.q.out +++ ql/src/test/results/clientnegative/groupby_grouping_sets2.q.out @@ -1,9 +1,9 @@ PREHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1 +PREHOOK: Output: default@t1 POSTHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1 +POSTHOOK: Output: default@t1 FAILED: SemanticException [Error 10211]: Empty grouping sets not allowed diff --git ql/src/test/results/clientnegative/groupby_grouping_sets3.q.out 
ql/src/test/results/clientnegative/groupby_grouping_sets3.q.out index 815f04fcda..0951ce9c33 100644 --- ql/src/test/results/clientnegative/groupby_grouping_sets3.q.out +++ ql/src/test/results/clientnegative/groupby_grouping_sets3.q.out @@ -1,9 +1,9 @@ PREHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1 +PREHOOK: Output: default@t1 POSTHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1 +POSTHOOK: Output: default@t1 FAILED: SemanticException 4:46 [Error 10213]: Grouping sets expression is not in GROUP BY key. Error encountered near token 'b' diff --git ql/src/test/results/clientnegative/groupby_grouping_sets4.q.out ql/src/test/results/clientnegative/groupby_grouping_sets4.q.out index 4a595cb07b..0f0d46f649 100644 --- ql/src/test/results/clientnegative/groupby_grouping_sets4.q.out +++ ql/src/test/results/clientnegative/groupby_grouping_sets4.q.out @@ -1,9 +1,9 @@ PREHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1 +PREHOOK: Output: default@t1 POSTHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1 +POSTHOOK: Output: default@t1 FAILED: SemanticException [Error 10025]: Line 4:7 Expression not in GROUP BY key 'a' diff --git ql/src/test/results/clientnegative/groupby_grouping_sets5.q.out ql/src/test/results/clientnegative/groupby_grouping_sets5.q.out index 4a595cb07b..0f0d46f649 100644 --- ql/src/test/results/clientnegative/groupby_grouping_sets5.q.out +++ ql/src/test/results/clientnegative/groupby_grouping_sets5.q.out @@ -1,9 +1,9 @@ PREHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1 +PREHOOK: Output: default@t1 POSTHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1 +POSTHOOK: Output: default@t1 FAILED: SemanticException [Error 10025]: Line 4:7 Expression not in GROUP BY key 'a' diff --git ql/src/test/results/clientnegative/groupby_grouping_sets6.q.out ql/src/test/results/clientnegative/groupby_grouping_sets6.q.out index a2f399a754..3733911c34 100644 --- ql/src/test/results/clientnegative/groupby_grouping_sets6.q.out +++ ql/src/test/results/clientnegative/groupby_grouping_sets6.q.out @@ -1,9 +1,9 @@ PREHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1 +PREHOOK: Output: default@t1 POSTHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1 +POSTHOOK: Output: default@t1 FAILED: SemanticException [Error 10226]: An additional MR job is introduced since the cardinality of grouping sets is more than hive.new.job.grouping.set.cardinality. This functionality is not supported with distincts. Either set hive.new.job.grouping.set.cardinality to a high number (higher than the number of rows per input row due to grouping sets in the query), or rewrite the query to not use distincts. 
The number of rows per input row due to grouping sets is 4 diff --git ql/src/test/results/clientnegative/groupby_grouping_sets7.q.out ql/src/test/results/clientnegative/groupby_grouping_sets7.q.out index 226de5ab1a..9866221f19 100644 --- ql/src/test/results/clientnegative/groupby_grouping_sets7.q.out +++ ql/src/test/results/clientnegative/groupby_grouping_sets7.q.out @@ -1,9 +1,9 @@ PREHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1 +PREHOOK: Output: default@t1 POSTHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1 +POSTHOOK: Output: default@t1 FAILED: SemanticException [Error 10225]: An additional MR job is introduced since the number of rows created per input row due to grouping sets is more than hive.new.job.grouping.set.cardinality. There is no need to handle skew separately. set hive.groupby.skewindata to false. The number of rows per input row due to grouping sets is 4 diff --git ql/src/test/results/clientnegative/insert_into_notnull_constraint.q.out ql/src/test/results/clientnegative/insert_into_notnull_constraint.q.out index 96feec0d30..53c0f4e623 100644 --- ql/src/test/results/clientnegative/insert_into_notnull_constraint.q.out +++ ql/src/test/results/clientnegative/insert_into_notnull_constraint.q.out @@ -1,9 +1,9 @@ PREHOOK: query: create table nullConstraintCheck(i int NOT NULL enforced, j int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@nullConstraintCheck +PREHOOK: Output: default@nullconstraintcheck POSTHOOK: query: create table nullConstraintCheck(i int NOT NULL enforced, j int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@nullConstraintCheck +POSTHOOK: Output: default@nullconstraintcheck FAILED: DataConstraintViolationError org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either CHECK or NOT NULL constraint violated! diff --git ql/src/test/results/clientnegative/insert_overwrite_notnull_constraint.q.out ql/src/test/results/clientnegative/insert_overwrite_notnull_constraint.q.out index 96feec0d30..53c0f4e623 100644 --- ql/src/test/results/clientnegative/insert_overwrite_notnull_constraint.q.out +++ ql/src/test/results/clientnegative/insert_overwrite_notnull_constraint.q.out @@ -1,9 +1,9 @@ PREHOOK: query: create table nullConstraintCheck(i int NOT NULL enforced, j int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@nullConstraintCheck +PREHOOK: Output: default@nullconstraintcheck POSTHOOK: query: create table nullConstraintCheck(i int NOT NULL enforced, j int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@nullConstraintCheck +POSTHOOK: Output: default@nullconstraintcheck FAILED: DataConstraintViolationError org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either CHECK or NOT NULL constraint violated! 
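Note on the pattern behind these golden-file updates: the BasicStatsTask hunks earlier in this patch replace hand-concatenated dbName.tblName strings with the catalog-aware TableName helper, which also normalizes identifiers to lowercase (hence the dropped prefix.toLowerCase() call, the hive.default.* names in EXPLAIN output, and the lowercased default@t1-style outputs here). A minimal sketch of that usage, assuming TableName.fromString(name, defaultCatalog, defaultDb) qualifies an unqualified name and that its toString() renders the lowercased three-part form:

import org.apache.hadoop.hive.common.TableName;

public class QualifiedNameSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical inputs: "Employee_Part" stands in for a user-typed, mixed-case
    // table name; "hive" and "default" for the session's catalog and database.
    TableName name = TableName.fromString("Employee_Part", "hive", "default");
    // Expected, per the updated .q.out files above: hive.default.employee_part
    System.out.println(name);
  }
}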
diff --git ql/src/test/results/clientnegative/load_wrong_fileformat.q.out ql/src/test/results/clientnegative/load_wrong_fileformat.q.out index 25b590cba2..e33e4cb0d3 100644 --- ql/src/test/results/clientnegative/load_wrong_fileformat.q.out +++ ql/src/test/results/clientnegative/load_wrong_fileformat.q.out @@ -1,9 +1,9 @@ PREHOOK: query: CREATE TABLE load_wrong_fileformat_T1(name STRING) STORED AS SEQUENCEFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@load_wrong_fileformat_T1 +PREHOOK: Output: default@load_wrong_fileformat_t1 POSTHOOK: query: CREATE TABLE load_wrong_fileformat_T1(name STRING) STORED AS SEQUENCEFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@load_wrong_fileformat_T1 +POSTHOOK: Output: default@load_wrong_fileformat_t1 FAILED: SemanticException Unable to load data to destination table. Error: The file that you are trying to load does not match the file format of the destination table. diff --git ql/src/test/results/clientnegative/load_wrong_fileformat_rc_seq.q.out ql/src/test/results/clientnegative/load_wrong_fileformat_rc_seq.q.out index 1aec72349e..da8de58c7f 100644 --- ql/src/test/results/clientnegative/load_wrong_fileformat_rc_seq.q.out +++ ql/src/test/results/clientnegative/load_wrong_fileformat_rc_seq.q.out @@ -1,9 +1,9 @@ PREHOOK: query: CREATE TABLE T1(name STRING) STORED AS RCFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1 +PREHOOK: Output: default@t1 POSTHOOK: query: CREATE TABLE T1(name STRING) STORED AS RCFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1 +POSTHOOK: Output: default@t1 FAILED: SemanticException Unable to load data to destination table. Error: The file that you are trying to load does not match the file format of the destination table. diff --git ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out index 7cf68205c0..c11ce99606 100644 --- ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out +++ ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out @@ -1,9 +1,9 @@ PREHOOK: query: CREATE TABLE T1(name STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1 +PREHOOK: Output: default@t1 POSTHOOK: query: CREATE TABLE T1(name STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1 +POSTHOOK: Output: default@t1 FAILED: SemanticException Unable to load data to destination table. Error: The file that you are trying to load does not match the file format of the destination table. 
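Similarly, the negative-test messages in this patch now carry a qualified db.table name where a bare table name used to appear (default.not_an_acid_table2 above, default.mm_srcpart just below). The message-formatting sites themselves are not part of this diff; the following is a hedged sketch of the call shape only, reusing the HiveTableName.of(table) helper from the BasicStatsTask hunk and assuming TableName.getNotEmptyDbTable() yields the two-part rendering:

import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.HiveTableName;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public final class AcidCheckSketch {
  // Illustrative only: the real check lives in the update/delete analysis path.
  static void requireTransactional(Table table, boolean transactional) throws Exception {
    if (!transactional) {
      // getNotEmptyDbTable() is assumed to print "db.table" (e.g. default.mm_srcpart),
      // matching the updated golden files rather than the three-part catalog form.
      throw new SemanticException("Attempt to do update or delete on table "
          + HiveTableName.of(table).getNotEmptyDbTable() + " that is not transactional");
    }
  }
}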
diff --git ql/src/test/results/clientnegative/masking_mv.q.out ql/src/test/results/clientnegative/masking_mv.q.out index 02ff153254..57937e1882 100644 --- ql/src/test/results/clientnegative/masking_mv.q.out +++ ql/src/test/results/clientnegative/masking_mv.q.out @@ -56,7 +56,7 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - name: default.masking_test_view_n_mv + name: hive.default.masking_test_view_n_mv Select Operator expressions: _col0 (type: int) outputColumnNames: col1 @@ -105,7 +105,7 @@ STAGE PLANS: Create View columns: key int expanded text: select `masking_test_n_mv`.`key` from `default`.`masking_test_n_mv` - name: default.masking_test_view_n_mv + name: hive.default.masking_test_view_n_mv original text: select key from masking_test_n_mv rewrite enabled: true @@ -119,7 +119,7 @@ STAGE PLANS: Stage: Stage-9 Materialized View Update - name: default.masking_test_view_n_mv + name: hive.default.masking_test_view_n_mv retrieve and include: true Stage: Stage-3 diff --git ql/src/test/results/clientnegative/materialized_view_name_collusion.q.out ql/src/test/results/clientnegative/materialized_view_name_collusion.q.out index 9a2cb3e340..eccbc24643 100644 --- ql/src/test/results/clientnegative/materialized_view_name_collusion.q.out +++ ql/src/test/results/clientnegative/materialized_view_name_collusion.q.out @@ -16,4 +16,4 @@ POSTHOOK: type: CREATE_MATERIALIZED_VIEW POSTHOOK: Input: default@mvnc_basetable POSTHOOK: Output: database:default POSTHOOK: Output: default@mvnc_mat_view -FAILED: SemanticException org.apache.hadoop.hive.ql.parse.SemanticException: Table already exists: default.mvnc_mat_view +FAILED: SemanticException org.apache.hadoop.hive.ql.parse.SemanticException: Table already exists: hive.default.mvnc_mat_view diff --git ql/src/test/results/clientnegative/merge_constraint_notnull.q.out ql/src/test/results/clientnegative/merge_constraint_notnull.q.out index 088e249a04..14b2d56081 100644 --- ql/src/test/results/clientnegative/merge_constraint_notnull.q.out +++ ql/src/test/results/clientnegative/merge_constraint_notnull.q.out @@ -22,13 +22,13 @@ clustered by (value) into 2 buckets stored as orc tblproperties ("transactional"="true") PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@testT +PREHOOK: Output: default@testt POSTHOOK: query: create table testT (key int NOT NULL enable, a1 string NOT NULL enforced, value string) clustered by (value) into 2 buckets stored as orc tblproperties ("transactional"="true") POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@testT +POSTHOOK: Output: default@testt PREHOOK: query: insert into testT values(2,'a1masking', 'valuemasking') PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table diff --git ql/src/test/results/clientnegative/mm_delete.q.out ql/src/test/results/clientnegative/mm_delete.q.out index d0fd905673..ed7bafba98 100644 --- ql/src/test/results/clientnegative/mm_delete.q.out +++ ql/src/test/results/clientnegative/mm_delete.q.out @@ -65,4 +65,4 @@ POSTHOOK: Input: _dummy_database@_dummy_table POSTHOOK: Output: default@mm_srcpart@ds=2008-04-08/hr=11 POSTHOOK: Lineage: mm_srcpart PARTITION(ds=2008-04-08,hr=11).key SCRIPT [] POSTHOOK: Lineage: mm_srcpart PARTITION(ds=2008-04-08,hr=11).value SCRIPT [] -FAILED: SemanticException [Error 10414]: Attempt to do update or delete on table mm_srcpart that is insert-only 
transactional +FAILED: SemanticException [Error 10414]: Attempt to do update or delete on table default.mm_srcpart that is insert-only transactional diff --git ql/src/test/results/clientnegative/mm_update.q.out ql/src/test/results/clientnegative/mm_update.q.out index 528d16269f..946ffd1598 100644 --- ql/src/test/results/clientnegative/mm_update.q.out +++ ql/src/test/results/clientnegative/mm_update.q.out @@ -55,4 +55,4 @@ POSTHOOK: Input: default@mm_srcpart@ds=2008-04-09/hr=11 2008-04-09 11 43 val_43 2008-04-09 11 413 val_413 2008-04-09 11 413 val_413 -FAILED: SemanticException [Error 10414]: Attempt to do update or delete on table mm_srcpart that is insert-only transactional +FAILED: SemanticException [Error 10414]: Attempt to do update or delete on table default.mm_srcpart that is insert-only transactional diff --git ql/src/test/results/clientnegative/orc_change_fileformat.q.out ql/src/test/results/clientnegative/orc_change_fileformat.q.out index e5429619aa..50ab775041 100644 --- ql/src/test/results/clientnegative/orc_change_fileformat.q.out +++ ql/src/test/results/clientnegative/orc_change_fileformat.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc set fileformat textfile PREHOOK: type: ALTERTABLE_FILEFORMAT PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Changing file format (from ORC) is not supported for table default.src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Changing file format (from ORC) is not supported for table hive.default.src_orc diff --git ql/src/test/results/clientnegative/orc_change_fileformat_acid.q.out ql/src/test/results/clientnegative/orc_change_fileformat_acid.q.out index 375795a4c1..1c53ad500d 100644 --- ql/src/test/results/clientnegative/orc_change_fileformat_acid.q.out +++ ql/src/test/results/clientnegative/orc_change_fileformat_acid.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc set fileformat textfile PREHOOK: type: ALTERTABLE_FILEFORMAT PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Changing file format (from ORC) is not supported for table default.src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Changing file format (from ORC) is not supported for table hive.default.src_orc diff --git ql/src/test/results/clientnegative/orc_change_serde.q.out ql/src/test/results/clientnegative/orc_change_serde.q.out index aea47137b5..d05b7acb1b 100644 --- ql/src/test/results/clientnegative/orc_change_serde.q.out +++ ql/src/test/results/clientnegative/orc_change_serde.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc set serde 'org.apache.hadoop.hive.serde2.col PREHOOK: type: ALTERTABLE_SERIALIZER PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Changing SerDe (from OrcSerde) is not supported for table default.src_orc. File format may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Changing SerDe (from OrcSerde) is not supported for table hive.default.src_orc. 
File format may be incompatible diff --git ql/src/test/results/clientnegative/orc_change_serde_acid.q.out ql/src/test/results/clientnegative/orc_change_serde_acid.q.out index 60938e4892..b3b6904593 100644 --- ql/src/test/results/clientnegative/orc_change_serde_acid.q.out +++ ql/src/test/results/clientnegative/orc_change_serde_acid.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc set serde 'org.apache.hadoop.hive.serde2.col PREHOOK: type: ALTERTABLE_SERIALIZER PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Changing SerDe (from OrcSerde) is not supported for table default.src_orc. File format may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Changing SerDe (from OrcSerde) is not supported for table hive.default.src_orc. File format may be incompatible diff --git ql/src/test/results/clientnegative/orc_reorder_columns1.q.out ql/src/test/results/clientnegative/orc_reorder_columns1.q.out index f415350c7c..0e7a694cbc 100644 --- ql/src/test/results/clientnegative/orc_reorder_columns1.q.out +++ ql/src/test/results/clientnegative/orc_reorder_columns1.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc change key k tinyint first PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Reordering columns is not supported for table hive.default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_reorder_columns1_acid.q.out ql/src/test/results/clientnegative/orc_reorder_columns1_acid.q.out index 611fbb064d..d006abf1d4 100644 --- ql/src/test/results/clientnegative/orc_reorder_columns1_acid.q.out +++ ql/src/test/results/clientnegative/orc_reorder_columns1_acid.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc change key k tinyint first PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Reordering columns is not supported for table hive.default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_reorder_columns2.q.out ql/src/test/results/clientnegative/orc_reorder_columns2.q.out index 2fb288c619..26daa26c73 100644 --- ql/src/test/results/clientnegative/orc_reorder_columns2.q.out +++ ql/src/test/results/clientnegative/orc_reorder_columns2.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc change key k tinyint after val PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Reordering columns is not supported for table hive.default.src_orc. 
SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_reorder_columns2_acid.q.out ql/src/test/results/clientnegative/orc_reorder_columns2_acid.q.out index 2deb291b85..53f66ba1e8 100644 --- ql/src/test/results/clientnegative/orc_reorder_columns2_acid.q.out +++ ql/src/test/results/clientnegative/orc_reorder_columns2_acid.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc change key k tinyint after val PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Reordering columns is not supported for table hive.default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_replace_columns1.q.out ql/src/test/results/clientnegative/orc_replace_columns1.q.out index f3fcae5878..f692d11096 100644 --- ql/src/test/results/clientnegative/orc_replace_columns1.q.out +++ ql/src/test/results/clientnegative/orc_replace_columns1.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc replace columns (k int) PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Replacing columns cannot drop columns for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Replacing columns cannot drop columns for table hive.default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_replace_columns1_acid.q.out ql/src/test/results/clientnegative/orc_replace_columns1_acid.q.out index 6c66155a86..749eb749b5 100644 --- ql/src/test/results/clientnegative/orc_replace_columns1_acid.q.out +++ ql/src/test/results/clientnegative/orc_replace_columns1_acid.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc replace columns (k int) PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Replacing columns cannot drop columns for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Replacing columns cannot drop columns for table hive.default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/parquet_alter_part_table_drop_columns.q.out ql/src/test/results/clientnegative/parquet_alter_part_table_drop_columns.q.out index 5b3936dee5..e130ab978e 100644 --- ql/src/test/results/clientnegative/parquet_alter_part_table_drop_columns.q.out +++ ql/src/test/results/clientnegative/parquet_alter_part_table_drop_columns.q.out @@ -50,4 +50,4 @@ favnumber int PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@myparquettable_parted PREHOOK: Output: default@myparquettable_parted -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Replacing columns cannot drop columns for table default.myparquettable_parted. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Replacing columns cannot drop columns for table hive.default.myparquettable_parted. 
SerDe may be incompatible diff --git ql/src/test/results/clientnegative/set_table_property.q.out ql/src/test/results/clientnegative/set_table_property.q.out index 2278456166..692a312e1f 100644 --- ql/src/test/results/clientnegative/set_table_property.q.out +++ ql/src/test/results/clientnegative/set_table_property.q.out @@ -1,9 +1,9 @@ PREHOOK: query: create table testTable(col1 int, col2 int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@testTable +PREHOOK: Output: default@testtable POSTHOOK: query: create table testTable(col1 int, col2 int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@testTable +POSTHOOK: Output: default@testtable FAILED: ParseException line 4:45 missing StringLiteral at ')' near '' diff --git ql/src/test/results/clientnegative/show_partitions1.q.out ql/src/test/results/clientnegative/show_partitions1.q.out index ca54088df5..eaea1fa7ed 100644 --- ql/src/test/results/clientnegative/show_partitions1.q.out +++ ql/src/test/results/clientnegative/show_partitions1.q.out @@ -1 +1 @@ -FAILED: SemanticException [Error 10001]: Table not found NonExistentTable +FAILED: SemanticException [Error 10001]: Table not found nonexistenttable diff --git ql/src/test/results/clientnegative/show_tableproperties1.q.out ql/src/test/results/clientnegative/show_tableproperties1.q.out index 29ed890c69..54a5d255f4 100644 --- ql/src/test/results/clientnegative/show_tableproperties1.q.out +++ ql/src/test/results/clientnegative/show_tableproperties1.q.out @@ -1 +1 @@ -FAILED: SemanticException [Error 10001]: Table not found default.NonExistentTable +FAILED: SemanticException [Error 10001]: Table not found default.nonexistenttable diff --git ql/src/test/results/clientnegative/spark/groupby_grouping_sets7.q.out ql/src/test/results/clientnegative/spark/groupby_grouping_sets7.q.out index 226de5ab1a..9866221f19 100644 --- ql/src/test/results/clientnegative/spark/groupby_grouping_sets7.q.out +++ ql/src/test/results/clientnegative/spark/groupby_grouping_sets7.q.out @@ -1,9 +1,9 @@ PREHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1 +PREHOOK: Output: default@t1 POSTHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1 +POSTHOOK: Output: default@t1 FAILED: SemanticException [Error 10225]: An additional MR job is introduced since the number of rows created per input row due to grouping sets is more than hive.new.job.grouping.set.cardinality. There is no need to handle skew separately. set hive.groupby.skewindata to false. 
The number of rows per input row due to grouping sets is 4 diff --git ql/src/test/results/clientnegative/subquery_corr_in_agg.q.out ql/src/test/results/clientnegative/subquery_corr_in_agg.q.out index ff17cd0d07..e0df57913e 100644 --- ql/src/test/results/clientnegative/subquery_corr_in_agg.q.out +++ ql/src/test/results/clientnegative/subquery_corr_in_agg.q.out @@ -1,11 +1,11 @@ PREHOOK: query: create table Part1 (PNum int, OrderOnHand int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@Part1 +PREHOOK: Output: default@part1 POSTHOOK: query: create table Part1 (PNum int, OrderOnHand int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@Part1 +POSTHOOK: Output: default@part1 PREHOOK: query: insert into Part1 values (3,6),(10,1),(8,0) PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table @@ -19,11 +19,11 @@ POSTHOOK: Lineage: part1.pnum SCRIPT [] PREHOOK: query: create table Supply (PNum int, Qty int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@Supply +PREHOOK: Output: default@supply POSTHOOK: query: create table Supply (PNum int, Qty int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@Supply +POSTHOOK: Output: default@supply PREHOOK: query: insert into Supply values (3,4),(3,2),(10,1) PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table diff --git ql/src/test/results/clientnegative/unset_table_property.q.out ql/src/test/results/clientnegative/unset_table_property.q.out index 20378a1ce0..540653ec30 100644 --- ql/src/test/results/clientnegative/unset_table_property.q.out +++ ql/src/test/results/clientnegative/unset_table_property.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE testTable(col1 INT, col2 INT) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@testTable +PREHOOK: Output: default@testtable POSTHOOK: query: CREATE TABLE testTable(col1 INT, col2 INT) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@testTable +POSTHOOK: Output: default@testtable PREHOOK: query: ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'c'='3') PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: default@testtable diff --git ql/src/test/results/clientnegative/unset_view_property.q.out ql/src/test/results/clientnegative/unset_view_property.q.out index b5038d971b..4ea94a55ac 100644 --- ql/src/test/results/clientnegative/unset_view_property.q.out +++ ql/src/test/results/clientnegative/unset_view_property.q.out @@ -2,13 +2,13 @@ PREHOOK: query: CREATE VIEW testView AS SELECT value FROM src WHERE key=86 PREHOOK: type: CREATEVIEW PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@testView +PREHOOK: Output: default@testview POSTHOOK: query: CREATE VIEW testView AS SELECT value FROM src WHERE key=86 POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@testView -POSTHOOK: Lineage: testView.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@testview +POSTHOOK: Lineage: testview.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: ALTER VIEW testView SET TBLPROPERTIES ('propA'='100', 'propB'='200') PREHOOK: type: ALTERVIEW_PROPERTIES PREHOOK: Input: default@testview diff --git ql/src/test/results/clientnegative/update_non_acid_table.q.out 
ql/src/test/results/clientnegative/update_non_acid_table.q.out index 64164ba4ed..02946fc185 100644 --- ql/src/test/results/clientnegative/update_non_acid_table.q.out +++ ql/src/test/results/clientnegative/update_non_acid_table.q.out @@ -34,4 +34,4 @@ POSTHOOK: Input: default@not_an_acid_table -1070883071 0ruyd6Y50JpdGRf6HqD -1070551679 iUR3Q -1069736047 k17Am8uPHWk02cEf1jet -FAILED: SemanticException [Error 10297]: Attempt to do update or delete on table not_an_acid_table that is not transactional +FAILED: SemanticException [Error 10297]: Attempt to do update or delete on table default.not_an_acid_table that is not transactional diff --git ql/src/test/results/clientpositive/alter_change_db_location.q.out ql/src/test/results/clientpositive/alter_change_db_location.q.out index 4c2115334b..28b3e6f1e1 100644 --- ql/src/test/results/clientpositive/alter_change_db_location.q.out +++ ql/src/test/results/clientpositive/alter_change_db_location.q.out @@ -21,11 +21,11 @@ POSTHOOK: Input: database:newdb PREHOOK: query: create table tab_n13 (name string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:newdb -PREHOOK: Output: newDB@tab_n13 +PREHOOK: Output: newdb@tab_n13 POSTHOOK: query: create table tab_n13 (name string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:newdb -POSTHOOK: Output: newDB@tab_n13 +POSTHOOK: Output: newdb@tab_n13 PREHOOK: query: alter table tab_n13 rename to newName PREHOOK: type: ALTERTABLE_RENAME PREHOOK: Input: newdb@tab_n13 @@ -33,5 +33,5 @@ PREHOOK: Output: newdb@tab_n13 POSTHOOK: query: alter table tab_n13 rename to newName POSTHOOK: type: ALTERTABLE_RENAME POSTHOOK: Input: newdb@tab_n13 -POSTHOOK: Output: newDB@newName +POSTHOOK: Output: newdb@newname POSTHOOK: Output: newdb@tab_n13 diff --git ql/src/test/results/clientpositive/annotate_stats_table.q.out ql/src/test/results/clientpositive/annotate_stats_table.q.out index 9b9e31b214..38b3c6f93c 100644 --- ql/src/test/results/clientpositive/annotate_stats_table.q.out +++ ql/src/test/results/clientpositive/annotate_stats_table.q.out @@ -369,10 +369,10 @@ STAGE PLANS: Stage: Stage-7 Create Table columns: _c0 int - name: default.tmp_n0 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.tmp_n0 Stage: Stage-2 Map Reduce diff --git ql/src/test/results/clientpositive/annotate_stats_udtf.q.out ql/src/test/results/clientpositive/annotate_stats_udtf.q.out index c094fc1fea..b757a27568 100644 --- ql/src/test/results/clientpositive/annotate_stats_udtf.q.out +++ ql/src/test/results/clientpositive/annotate_stats_udtf.q.out @@ -5,11 +5,11 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: create table HIVE_20262 (a array<int>) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@HIVE_20262 +PREHOOK: Output: default@hive_20262 POSTHOOK: query: create table HIVE_20262 (a array<int>) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@HIVE_20262 +POSTHOOK: Output: default@hive_20262 PREHOOK: query: insert into HIVE_20262 select array(1) PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table diff --git ql/src/test/results/clientpositive/case_sensitivity.q.out ql/src/test/results/clientpositive/case_sensitivity.q.out index fdcf86b1da..caf872ff41 100644 --- ql/src/test/results/clientpositive/case_sensitivity.q.out +++ ql/src/test/results/clientpositive/case_sensitivity.q.out @@ -1,11 +1,11 @@ PREHOOK: query:
CREATE TABLE DEST1_n129(Key INT, VALUE STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DEST1_n129 +PREHOOK: Output: default@dest1_n129 POSTHOOK: query: CREATE TABLE DEST1_n129(Key INT, VALUE STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DEST1_n129 +POSTHOOK: Output: default@dest1_n129 PREHOOK: query: EXPLAIN FROM SRC_THRIFT INSERT OVERWRITE TABLE dest1_n129 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0 diff --git ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out index f445ba3d3d..086a62b721 100644 --- ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out +++ ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out @@ -1,11 +1,11 @@ PREHOOK: query: create table A_n18 (key string, value string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@A_n18 +PREHOOK: Output: default@a_n18 POSTHOOK: query: create table A_n18 (key string, value string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@A_n18 +POSTHOOK: Output: default@a_n18 PREHOOK: query: insert into A_n18 select * from src PREHOOK: type: QUERY @@ -21,11 +21,11 @@ POSTHOOK: Lineage: a_n18.value SIMPLE [(src)src.FieldSchema(name:value, type:str PREHOOK: query: create table B_n14 (key string, value string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@B_n14 +PREHOOK: Output: default@b_n14 POSTHOOK: query: create table B_n14 (key string, value string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@B_n14 +POSTHOOK: Output: default@b_n14 PREHOOK: query: insert into B_n14 select * from src order by key limit 10 diff --git ql/src/test/results/clientpositive/clusterctas.q.out ql/src/test/results/clientpositive/clusterctas.q.out index 9d76bc5903..0718f16905 100644 --- ql/src/test/results/clientpositive/clusterctas.q.out +++ ql/src/test/results/clientpositive/clusterctas.q.out @@ -79,10 +79,10 @@ STAGE PLANS: Stage: Stage-4 Create Table columns: key string, value string - name: default.x input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde name: org.apache.hadoop.hive.ql.io.orc.OrcSerde + name: hive.default.x table properties: transactional true diff --git ql/src/test/results/clientpositive/column_pruner_multiple_children.q.out ql/src/test/results/clientpositive/column_pruner_multiple_children.q.out index b72c13ad22..617dda97c8 100644 --- ql/src/test/results/clientpositive/column_pruner_multiple_children.q.out +++ ql/src/test/results/clientpositive/column_pruner_multiple_children.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE DEST1_n52(key INT, value STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DEST1_n52 +PREHOOK: Output: default@dest1_n52 POSTHOOK: query: CREATE TABLE DEST1_n52(key INT, value STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DEST1_n52 +POSTHOOK: Output: default@dest1_n52 PREHOOK: query: create table s_n129 as select * from src where key='10' PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src diff --git 
ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out index 09ebed0c0a..ee82e76631 100644 --- ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out +++ ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out @@ -125,11 +125,11 @@ NULL NULL NULL 0 NULL PREHOOK: query: CREATE table columnShortcutTable (key STRING, value STRING) STORED AS RCFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@columnShortcutTable +PREHOOK: Output: default@columnshortcuttable POSTHOOK: query: CREATE table columnShortcutTable (key STRING, value STRING) STORED AS RCFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@columnShortcutTable +POSTHOOK: Output: default@columnshortcuttable PREHOOK: query: FROM src INSERT OVERWRITE TABLE columnShortcutTable SELECT src.key, src.value LIMIT 10 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/columnstats_partlvl.q.out ql/src/test/results/clientpositive/columnstats_partlvl.q.out index f12577c3c2..df1ffd1f1b 100644 --- ql/src/test/results/clientpositive/columnstats_partlvl.q.out +++ ql/src/test/results/clientpositive/columnstats_partlvl.q.out @@ -6,12 +6,12 @@ PREHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) row format delimited fields terminated by '|' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@Employee_Part +PREHOOK: Output: default@employee_part POSTHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double) row format delimited fields terminated by '|' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@Employee_Part +POSTHOOK: Output: default@employee_part PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=2000.0) PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/columnstats_partlvl_dp.q.out ql/src/test/results/clientpositive/columnstats_partlvl_dp.q.out index e2946a227e..da04cd1e74 100644 --- ql/src/test/results/clientpositive/columnstats_partlvl_dp.q.out +++ ql/src/test/results/clientpositive/columnstats_partlvl_dp.q.out @@ -6,12 +6,12 @@ PREHOOK: query: CREATE TABLE Employee_Part_n0(employeeID int, employeeName Strin row format delimited fields terminated by '|' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@Employee_Part_n0 +PREHOOK: Output: default@employee_part_n0 POSTHOOK: query: CREATE TABLE Employee_Part_n0(employeeID int, employeeName String) partitioned by (employeeSalary double, country string) row format delimited fields terminated by '|' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@Employee_Part_n0 +POSTHOOK: Output: default@employee_part_n0 PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee.dat" INTO TABLE Employee_Part_n0 partition(employeeSalary='2000.0', country='USA') PREHOOK: type: LOAD #### A masked pattern was here #### @@ -637,12 +637,12 @@ PREHOOK: query: CREATE TABLE Employee_n0(employeeID int, employeeName String) pa row format delimited fields terminated by '|' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: 
default@Employee_n0 +PREHOOK: Output: default@employee_n0 POSTHOOK: query: CREATE TABLE Employee_n0(employeeID int, employeeName String) partitioned by (employeeSalary double, country string) row format delimited fields terminated by '|' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@Employee_n0 +POSTHOOK: Output: default@employee_n0 PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee.dat" INTO TABLE Employee_n0 partition(employeeSalary='2000.0', country='USA') PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/columnstats_tbllvl.q.out ql/src/test/results/clientpositive/columnstats_tbllvl.q.out index f22d15c2d3..a2c502662a 100644 --- ql/src/test/results/clientpositive/columnstats_tbllvl.q.out +++ ql/src/test/results/clientpositive/columnstats_tbllvl.q.out @@ -15,7 +15,7 @@ PREHOOK: query: CREATE TABLE UserVisits_web_text_none ( row format delimited fields terminated by '|' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@UserVisits_web_text_none +PREHOOK: Output: default@uservisits_web_text_none POSTHOOK: query: CREATE TABLE UserVisits_web_text_none ( sourceIP string, destURL string, @@ -29,7 +29,7 @@ POSTHOOK: query: CREATE TABLE UserVisits_web_text_none ( row format delimited fields terminated by '|' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@UserVisits_web_text_none +POSTHOOK: Output: default@uservisits_web_text_none PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none PREHOOK: type: LOAD #### A masked pattern was here #### @@ -514,7 +514,7 @@ PREHOOK: query: CREATE TABLE UserVisits_in_dummy_db ( row format delimited fields terminated by '|' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:dummydb -PREHOOK: Output: dummydb@UserVisits_in_dummy_db +PREHOOK: Output: dummydb@uservisits_in_dummy_db POSTHOOK: query: CREATE TABLE UserVisits_in_dummy_db ( sourceIP string, destURL string, @@ -528,7 +528,7 @@ POSTHOOK: query: CREATE TABLE UserVisits_in_dummy_db ( row format delimited fields terminated by '|' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:dummydb -POSTHOOK: Output: dummydb@UserVisits_in_dummy_db +POSTHOOK: Output: dummydb@uservisits_in_dummy_db PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_in_dummy_db PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/correlationoptimizer5.q.out ql/src/test/results/clientpositive/correlationoptimizer5.q.out index 2e9e6027ae..2a19232fdd 100644 --- ql/src/test/results/clientpositive/correlationoptimizer5.q.out +++ ql/src/test/results/clientpositive/correlationoptimizer5.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_n19(key INT, val STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n19 +PREHOOK: Output: default@t1_n19 POSTHOOK: query: CREATE TABLE T1_n19(key INT, val STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n19 +POSTHOOK: Output: default@t1_n19 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1_n19 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -17,11 +17,11 @@ POSTHOOK: Output: default@t1_n19 PREHOOK: query: CREATE TABLE T2_n11(key INT, val 
STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n11 +PREHOOK: Output: default@t2_n11 POSTHOOK: query: CREATE TABLE T2_n11(key INT, val STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n11 +POSTHOOK: Output: default@t2_n11 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv2.txt' INTO TABLE T2_n11 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -33,11 +33,11 @@ POSTHOOK: Output: default@t2_n11 PREHOOK: query: CREATE TABLE T3_n5(key INT, val STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n5 +PREHOOK: Output: default@t3_n5 POSTHOOK: query: CREATE TABLE T3_n5(key INT, val STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n5 +POSTHOOK: Output: default@t3_n5 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv3.txt' INTO TABLE T3_n5 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -49,11 +49,11 @@ POSTHOOK: Output: default@t3_n5 PREHOOK: query: CREATE TABLE T4_n1(key INT, val STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T4_n1 +PREHOOK: Output: default@t4_n1 POSTHOOK: query: CREATE TABLE T4_n1(key INT, val STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T4_n1 +POSTHOOK: Output: default@t4_n1 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE T4_n1 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/create_view.q.out ql/src/test/results/clientpositive/create_view.q.out index 7414d4749d..768006265d 100644 --- ql/src/test/results/clientpositive/create_view.q.out +++ ql/src/test/results/clientpositive/create_view.q.out @@ -175,7 +175,7 @@ STAGE PLANS: Create View columns: valoo string expanded text: SELECT `_c0` AS `valoo` FROM (SELECT upper(`src`.`value`) FROM `default`.`src` WHERE `src`.`key`=86) `view0` - name: default.view0 + name: hive.default.view0 original text: SELECT upper(value) FROM src WHERE key=86 PREHOOK: query: EXPLAIN diff --git ql/src/test/results/clientpositive/cross_join_merge.q.out ql/src/test/results/clientpositive/cross_join_merge.q.out index 54e68cce20..44df17504f 100644 --- ql/src/test/results/clientpositive/cross_join_merge.q.out +++ ql/src/test/results/clientpositive/cross_join_merge.q.out @@ -1,7 +1,35 @@ -Warning: Shuffle Join JOIN[11][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Stage-2:MAPRED' is a cross product -Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product -PREHOOK: query: explain -select src1.key from src src1 join src src2 join src src3 +PREHOOK: query: create table A_n8 as +select * from src +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@src +PREHOOK: Output: database:default +PREHOOK: Output: default@a_n8 +POSTHOOK: query: create table A_n8 as +select * from src +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@src +POSTHOOK: Output: database:default +POSTHOOK: Output: default@a_n8 +POSTHOOK: Lineage: a_n8.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: a_n8.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: create table B_n6 as +select * from src +limit 10 +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@src +PREHOOK: Output: database:default +PREHOOK: Output: 
default@b_n6 +POSTHOOK: query: create table B_n6 as +select * from src +limit 10 +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@src +POSTHOOK: Output: database:default +POSTHOOK: Output: default@b_n6 +POSTHOOK: Lineage: b_n6.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: b_n6.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: explain select * from A_n8 join B_n6 PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/cross_product_check_2.q.out ql/src/test/results/clientpositive/cross_product_check_2.q.out new file mode 100644 index 0000000000..3452e95d6e --- /dev/null +++ ql/src/test/results/clientpositive/cross_product_check_2.q.out @@ -0,0 +1,741 @@ +PREHOOK: query: create table A_n2 as +select * from src +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@src +PREHOOK: Output: database:default +PREHOOK: Output: default@a_n2 +POSTHOOK: query: create table A_n2 as +select * from src +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@src +POSTHOOK: Output: database:default +POSTHOOK: Output: default@a_n2 +POSTHOOK: Lineage: a_n2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: a_n2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: create table B_n2 as +select * from src order by key +limit 10 +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@src +PREHOOK: Output: database:default +PREHOOK: Output: default@b_n2 +POSTHOOK: query: create table B_n2 as +select * from src order by key +limit 10 +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@src +POSTHOOK: Output: database:default +POSTHOOK: Output: default@b_n2 +POSTHOOK: Lineage: b_n2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: b_n2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +Warning: Map Join MAPJOIN[9][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: explain select * from A_n2 join B_n2 +PREHOOK: type: QUERY +PREHOOK: Input: default@a_n2 +PREHOOK: Input: default@b_n2 +#### A masked pattern was here #### +POSTHOOK: query: explain select * from A_n2 join B_n2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@a_n2 +POSTHOOK: Input: default@b_n2 +#### A masked pattern was here #### +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:b_n2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:b_n2 + TableScan + alias: b_n2 + Statistics: Num rows: 10 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 10 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: a_n2 + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 5000 Data size: 1770000 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 5000 Data size: 1770000 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Map Join MAPJOIN[20][bigTable=?] 
in task 'Stage-5:MAPRED' is a cross product +PREHOOK: query: explain select * from B_n2 d1 join B_n2 d2 on d1.key = d2.key join A_n2 +PREHOOK: type: QUERY +PREHOOK: Input: default@a_n2 +PREHOOK: Input: default@b_n2 +#### A masked pattern was here #### +POSTHOOK: query: explain select * from B_n2 d1 join B_n2 d2 on d1.key = d2.key join A_n2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@a_n2 +POSTHOOK: Input: default@b_n2 +#### A masked pattern was here #### +STAGE DEPENDENCIES: + Stage-7 is a root stage + Stage-5 depends on stages: Stage-7 + Stage-0 depends on stages: Stage-5 + +STAGE PLANS: + Stage: Stage-7 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:d1 + Fetch Operator + limit: -1 + $hdt$_2:a_n2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:d1 + TableScan + alias: d1 + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 10 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 10 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 10 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + $hdt$_2:a_n2 + TableScan + alias: a_n2 + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-5 + Map Reduce + Map Operator Tree: + TableScan + alias: d2 + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 10 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 10 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 10 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 20 Data size: 7040 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 10000 Data size: 5300000 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 10000 Data size: 5300000 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Map Join MAPJOIN[27][bigTable=?] 
in task 'Stage-5:MAPRED' is a cross product +PREHOOK: query: explain select * from A_n2 join + (select d1.key + from B_n2 d1 join B_n2 d2 on d1.key = d2.key + where 1 = 1 group by d1.key) od1 +PREHOOK: type: QUERY +PREHOOK: Input: default@a_n2 +PREHOOK: Input: default@b_n2 +#### A masked pattern was here #### +POSTHOOK: query: explain select * from A_n2 join + (select d1.key + from B_n2 d1 join B_n2 d2 on d1.key = d2.key + where 1 = 1 group by d1.key) od1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@a_n2 +POSTHOOK: Input: default@b_n2 +#### A masked pattern was here #### +STAGE DEPENDENCIES: + Stage-8 is a root stage + Stage-3 depends on stages: Stage-8 + Stage-7 depends on stages: Stage-3 + Stage-5 depends on stages: Stage-7 + Stage-0 depends on stages: Stage-5 + +STAGE PLANS: + Stage: Stage-8 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:$hdt$_1:d1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:$hdt$_1:d1 + TableScan + alias: d1 + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: d2 + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 20 Data size: 1720 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: _col0 (type: string) + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 5 Data size: 430 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 5 Data size: 430 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized + Local Work: + Map Reduce Local Work + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 5 Data size: 430 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-7 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:a_n2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:a_n2 + TableScan + alias: a_n2 + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + 
Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-5 + Map Reduce + Map Operator Tree: + TableScan + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 2500 Data size: 660000 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 2500 Data size: 660000 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Map Join MAPJOIN[23][bigTable=?] in task 'Stage-5:MAPRED' is a cross product +Warning: Map Join MAPJOIN[24][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: explain select * from A_n2 join (select d1.key from B_n2 d1 join B_n2 d2 where 1 = 1 group by d1.key) od1 +PREHOOK: type: QUERY +PREHOOK: Input: default@a_n2 +PREHOOK: Input: default@b_n2 +#### A masked pattern was here #### +POSTHOOK: query: explain select * from A_n2 join (select d1.key from B_n2 d1 join B_n2 d2 where 1 = 1 group by d1.key) od1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@a_n2 +POSTHOOK: Input: default@b_n2 +#### A masked pattern was here #### +STAGE DEPENDENCIES: + Stage-8 is a root stage + Stage-3 depends on stages: Stage-8 + Stage-7 depends on stages: Stage-3 + Stage-5 depends on stages: Stage-7 + Stage-0 depends on stages: Stage-5 + +STAGE PLANS: + Stage: Stage-8 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:$hdt$_1:d1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:$hdt$_1:d1 + TableScan + alias: d1 + Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: d2 + Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 10 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0 + Statistics: Num rows: 100 Data size: 8600 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: _col0 (type: string) + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 5 Data size: 430 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 5 Data size: 430 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized + Local Work: + Map Reduce Local Work + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 5 Data size: 430 Basic stats: COMPLETE Column stats: COMPLETE + File 
Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-7 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:a_n2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:a_n2 + TableScan + alias: a_n2 + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-5 + Map Reduce + Map Operator Tree: + TableScan + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 2500 Data size: 660000 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 2500 Data size: 660000 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Map Join MAPJOIN[46][bigTable=?] in task 'Stage-7:MAPRED' is a cross product +Warning: Map Join MAPJOIN[39][bigTable=?] in task 'Stage-6:MAPRED' is a cross product +Warning: Shuffle Join JOIN[22][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product +PREHOOK: query: explain select * from +(select A_n2.key from A_n2 group by key) ss join +(select d1.key from B_n2 d1 join B_n2 d2 on d1.key = d2.key where 1 = 1 group by d1.key) od1 +PREHOOK: type: QUERY +PREHOOK: Input: default@a_n2 +PREHOOK: Input: default@b_n2 +#### A masked pattern was here #### +POSTHOOK: query: explain select * from +(select A_n2.key from A_n2 group by key) ss join +(select d1.key from B_n2 d1 join B_n2 d2 on d1.key = d2.key where 1 = 1 group by d1.key) od1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@a_n2 +POSTHOOK: Input: default@b_n2 +#### A masked pattern was here #### +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-8 depends on stages: Stage-1, Stage-4 , consists of Stage-10, Stage-11, Stage-2 + Stage-10 has a backup stage: Stage-2 + Stage-6 depends on stages: Stage-10 + Stage-11 has a backup stage: Stage-2 + Stage-7 depends on stages: Stage-11 + Stage-2 + Stage-12 is a root stage + Stage-4 depends on stages: Stage-12 + Stage-0 depends on stages: Stage-6, Stage-7, Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: a_n2 + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: key + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: key (type: string) + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col0 (type: string) + 
Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-8 + Conditional Operator + + Stage: Stage-10 + Map Reduce Local Work + Alias -> Map Local Tables: + $INTNAME1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $INTNAME1 + TableScan + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-6 + Map Reduce + Map Operator Tree: + TableScan + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1250 Data size: 216250 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1250 Data size: 216250 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-11 + Map Reduce Local Work + Alias -> Map Local Tables: + $INTNAME + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $INTNAME + TableScan + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-7 + Map Reduce + Map Operator Tree: + TableScan + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1250 Data size: 216250 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1250 Data size: 216250 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + null sort order: + sort order: + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: string) + TableScan + Reduce Output Operator + null sort order: + sort order: + Statistics: Num rows: 5 Data size: 430 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1250 Data size: 216250 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1250 Data size: 216250 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-12 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:$hdt$_1:d1 + Fetch Operator + limit: -1 + 
Alias -> Map Local Operator Tree: + $hdt$_1:$hdt$_1:d1 + TableScan + alias: d1 + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + + Stage: Stage-4 + Map Reduce + Map Operator Tree: + TableScan + alias: d2 + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 20 Data size: 1720 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: _col0 (type: string) + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 5 Data size: 430 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 5 Data size: 430 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized + Local Work: + Map Reduce Local Work + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 5 Data size: 430 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + diff --git ql/src/test/results/clientpositive/ctas_colname.q.out ql/src/test/results/clientpositive/ctas_colname.q.out index 213a80986a..32a1c3c6db 100644 --- ql/src/test/results/clientpositive/ctas_colname.q.out +++ ql/src/test/results/clientpositive/ctas_colname.q.out @@ -80,10 +80,10 @@ STAGE PLANS: Stage: Stage-4 Create Table columns: key string, value string, _c1 double, _c2 string - name: default.summary input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.summary Stage: Stage-2 Stats Work @@ -293,10 +293,10 @@ STAGE PLANS: Stage: Stage-4 Create Table columns: key string, value string, rr int - name: default.x4 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.x4 Stage: Stage-2 Stats Work @@ -539,10 +539,10 @@ STAGE PLANS: Stage: Stage-5 Create Table columns: key string, value string, lead1 
string - name: default.x5 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.x5 Stage: Stage-3 Stats Work @@ -742,10 +742,10 @@ STAGE PLANS: Stage: Stage-8 Create Table columns: key string, value string, _c1 double - name: default.x6 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.x6 Stage: Stage-2 Stats Work @@ -953,10 +953,10 @@ STAGE PLANS: Stage: Stage-4 Create Table columns: _col0 string, _col1 string, _c1 bigint - name: default.x7 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.x7 Stage: Stage-2 Stats Work @@ -1442,10 +1442,10 @@ STAGE PLANS: Stage: Stage-4 Create Table columns: _col0 string, _col1 string, _c1 bigint - name: default.x8 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.x8 Stage: Stage-2 Stats Work @@ -1634,10 +1634,10 @@ STAGE PLANS: Stage: Stage-4 Create Table columns: _c0 string, key string - name: default.x9 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.x9 Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/ctas_uses_database_location.q.out ql/src/test/results/clientpositive/ctas_uses_database_location.q.out index a497af69e9..5b995ae4f2 100644 --- ql/src/test/results/clientpositive/ctas_uses_database_location.q.out +++ ql/src/test/results/clientpositive/ctas_uses_database_location.q.out @@ -101,10 +101,10 @@ STAGE PLANS: Stage: Stage-8 Create Table columns: key string, value string - name: db1.table_db1 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.db1.table_db1 Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/decimal_join2.q.out ql/src/test/results/clientpositive/decimal_join2.q.out index 59d0672132..8fbbb6dbed 100644 --- ql/src/test/results/clientpositive/decimal_join2.q.out +++ ql/src/test/results/clientpositive/decimal_join2.q.out @@ -12,14 +12,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_3_txt +PREHOOK: Output: default@decimal_3_txt POSTHOOK: query: CREATE TABLE DECIMAL_3_txt(key decimal(38,18), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_3_txt +POSTHOOK: Output: default@decimal_3_txt PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3_txt PREHOOK: type: LOAD #### A masked pattern was here #### @@ -32,12 +32,12 @@ PREHOOK: query: CREATE TABLE DECIMAL_3_n0 STORED AS ORC AS SELECT * FROM DECIMAL PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@decimal_3_txt PREHOOK: Output: database:default -PREHOOK: Output: 
default@DECIMAL_3_n0 +PREHOOK: Output: default@decimal_3_n0 POSTHOOK: query: CREATE TABLE DECIMAL_3_n0 STORED AS ORC AS SELECT * FROM DECIMAL_3_txt POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@decimal_3_txt POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_3_n0 +POSTHOOK: Output: default@decimal_3_n0 POSTHOOK: Lineage: decimal_3_n0.key SIMPLE [(decimal_3_txt)decimal_3_txt.FieldSchema(name:key, type:decimal(38,18), comment:null), ] POSTHOOK: Lineage: decimal_3_n0.value SIMPLE [(decimal_3_txt)decimal_3_txt.FieldSchema(name:value, type:int, comment:null), ] PREHOOK: query: EXPLAIN diff --git ql/src/test/results/clientpositive/decimal_precision.q.out ql/src/test/results/clientpositive/decimal_precision.q.out index 8aea2dca3f..c42a985dd8 100644 --- ql/src/test/results/clientpositive/decimal_precision.q.out +++ ql/src/test/results/clientpositive/decimal_precision.q.out @@ -8,14 +8,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_PRECISION_n0 +PREHOOK: Output: default@decimal_precision_n0 POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION_n0(`dec` decimal(20,10)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_PRECISION_n0 +POSTHOOK: Output: default@decimal_precision_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION_n0 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -645,11 +645,11 @@ POSTHOOK: Output: default@decimal_precision_n0 PREHOOK: query: CREATE TABLE DECIMAL_PRECISION_n0(`dec` decimal(38,18)) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_PRECISION_n0 +PREHOOK: Output: default@decimal_precision_n0 POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION_n0(`dec` decimal(38,18)) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_PRECISION_n0 +POSTHOOK: Output: default@decimal_precision_n0 PREHOOK: query: INSERT INTO DECIMAL_PRECISION_n0 VALUES(98765432109876543210.12345), (98765432109876543210.12345) PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table diff --git ql/src/test/results/clientpositive/decimal_udf.q.out ql/src/test/results/clientpositive/decimal_udf.q.out index f00ef023e4..38a5c32016 100644 --- ql/src/test/results/clientpositive/decimal_udf.q.out +++ ql/src/test/results/clientpositive/decimal_udf.q.out @@ -8,14 +8,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_UDF +PREHOOK: Output: default@decimal_udf POSTHOOK: query: CREATE TABLE DECIMAL_UDF (key decimal(20,10), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_UDF +POSTHOOK: Output: default@decimal_udf PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/decimal_udf2.q.out ql/src/test/results/clientpositive/decimal_udf2.q.out index eb98a07c8e..364d27039d 100644 --- ql/src/test/results/clientpositive/decimal_udf2.q.out +++ ql/src/test/results/clientpositive/decimal_udf2.q.out @@ -8,14 +8,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: 
database:default -PREHOOK: Output: default@DECIMAL_UDF2 +PREHOOK: Output: default@decimal_udf2 POSTHOOK: query: CREATE TABLE DECIMAL_UDF2 (key decimal(20,10), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_UDF2 +POSTHOOK: Output: default@decimal_udf2 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF2 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out index ca7ce6f0e4..d42434e08d 100644 --- ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out +++ ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out @@ -15,7 +15,7 @@ PREHOOK: query: CREATE TABLE UserVisits_web_text_none_n0 ( row format delimited fields terminated by '|' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@UserVisits_web_text_none_n0 +PREHOOK: Output: default@uservisits_web_text_none_n0 POSTHOOK: query: CREATE TABLE UserVisits_web_text_none_n0 ( sourceIP string, destURL string, @@ -29,7 +29,7 @@ POSTHOOK: query: CREATE TABLE UserVisits_web_text_none_n0 ( row format delimited fields terminated by '|' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@UserVisits_web_text_none_n0 +POSTHOOK: Output: default@uservisits_web_text_none_n0 PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none_n0 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -497,7 +497,7 @@ PREHOOK: query: CREATE TABLE UserVisits_web_text_none_n0 ( row format delimited fields terminated by '|' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:test -PREHOOK: Output: test@UserVisits_web_text_none_n0 +PREHOOK: Output: test@uservisits_web_text_none_n0 POSTHOOK: query: CREATE TABLE UserVisits_web_text_none_n0 ( sourceIP string, destURL string, @@ -511,7 +511,7 @@ POSTHOOK: query: CREATE TABLE UserVisits_web_text_none_n0 ( row format delimited fields terminated by '|' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:test -POSTHOOK: Output: test@UserVisits_web_text_none_n0 +POSTHOOK: Output: test@uservisits_web_text_none_n0 PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none_n0 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out index 65f8cca39b..43631c8047 100644 --- ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out +++ ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out @@ -169,8 +169,8 @@ STAGE PLANS: Stage: Stage-4 Create Table columns: __time timestamp with local time zone, cstring1 string, cstring2 string, cdouble double, cfloat float, ctinyint tinyint, csmallint smallint, cint int, cbigint bigint, cboolean1 boolean, cboolean2 boolean - name: default.druid_partitioned_table storage handler: org.apache.hadoop.hive.druid.DruidStorageHandler + name: hive.default.druid_partitioned_table table properties: druid.query.granularity MINUTE druid.segment.granularity HOUR diff --git ql/src/test/results/clientpositive/encrypted/encryption_ctas.q.out 
ql/src/test/results/clientpositive/encrypted/encryption_ctas.q.out index d8d30b405c..01bdf314c1 100644 --- ql/src/test/results/clientpositive/encrypted/encryption_ctas.q.out +++ ql/src/test/results/clientpositive/encrypted/encryption_ctas.q.out @@ -20,14 +20,14 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Input: hdfs://### HDFS PATH ### PREHOOK: Output: database:testct -PREHOOK: Output: testCT@encrypted_tablectas +PREHOOK: Output: testct@encrypted_tablectas #### A masked pattern was here #### AS SELECT * from src where key = 100 limit 1 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Input: hdfs://### HDFS PATH ### POSTHOOK: Output: database:testct -POSTHOOK: Output: testCT@encrypted_tablectas +POSTHOOK: Output: testct@encrypted_tablectas PREHOOK: query: select * from testCT.encrypted_tablectas PREHOOK: type: QUERY PREHOOK: Input: testct@encrypted_tablectas diff --git ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_dynamic.q.out ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_dynamic.q.out index f9c7060315..0d9b1124a9 100644 --- ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_dynamic.q.out +++ ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_dynamic.q.out @@ -12,26 +12,26 @@ PREHOOK: query: create table encryptedTable_n0(value string) PREHOOK: type: CREATETABLE PREHOOK: Input: hdfs://### HDFS PATH ### PREHOOK: Output: database:default -PREHOOK: Output: default@encryptedTable_n0 +PREHOOK: Output: default@encryptedtable_n0 POSTHOOK: query: create table encryptedTable_n0(value string) partitioned by (key string) clustered by (value) into 2 buckets stored as orc #### A masked pattern was here #### POSTHOOK: type: CREATETABLE POSTHOOK: Input: hdfs://### HDFS PATH ### POSTHOOK: Output: database:default -POSTHOOK: Output: default@encryptedTable_n0 +POSTHOOK: Output: default@encryptedtable_n0 Encryption key created: 'key_1' Encryption zone created: '/build/ql/test/data/warehouse/encryptedTable' using key: 'key_1' PREHOOK: query: create table unencryptedTable_n0(value string) partitioned by (key string) clustered by (value) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@unencryptedTable_n0 +PREHOOK: Output: default@unencryptedtable_n0 POSTHOOK: query: create table unencryptedTable_n0(value string) partitioned by (key string) clustered by (value) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@unencryptedTable_n0 +POSTHOOK: Output: default@unencryptedtable_n0 PREHOOK: query: insert into table encryptedTable_n0 partition (key) values ('val_501', '501'), ('val_502', '502') diff --git ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_static.q.out ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_static.q.out index 500b769ef4..0029a42cda 100644 --- ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_static.q.out +++ ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_static.q.out @@ -12,26 +12,26 @@ PREHOOK: query: create table encryptedTable(key string, PREHOOK: type: CREATETABLE PREHOOK: Input: hdfs://### HDFS PATH ### PREHOOK: Output: database:default -PREHOOK: Output: default@encryptedTable +PREHOOK: Output: default@encryptedtable POSTHOOK: 
query: create table encryptedTable(key string, value string) partitioned by (ds string) clustered by (key) into 2 buckets stored as orc #### A masked pattern was here #### POSTHOOK: type: CREATETABLE POSTHOOK: Input: hdfs://### HDFS PATH ### POSTHOOK: Output: database:default -POSTHOOK: Output: default@encryptedTable +POSTHOOK: Output: default@encryptedtable Encryption key created: 'key_1' Encryption zone created: '/build/ql/test/data/warehouse/encryptedTable' using key: 'key_1' PREHOOK: query: create table unencryptedTable(key string, value string) partitioned by (ds string) clustered by (key) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@unencryptedTable +PREHOOK: Output: default@unencryptedtable POSTHOOK: query: create table unencryptedTable(key string, value string) partitioned by (ds string) clustered by (key) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@unencryptedTable +POSTHOOK: Output: default@unencryptedtable PREHOOK: query: insert into table encryptedTable partition (ds='today') values ('501', 'val_501'), diff --git ql/src/test/results/clientpositive/explain_ddl.q.out ql/src/test/results/clientpositive/explain_ddl.q.out index aea46d304f..1022c1e557 100644 --- ql/src/test/results/clientpositive/explain_ddl.q.out +++ ql/src/test/results/clientpositive/explain_ddl.q.out @@ -2,14 +2,14 @@ PREHOOK: query: CREATE VIEW V1_n0 AS SELECT key, value from src PREHOOK: type: CREATEVIEW PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@V1_n0 +PREHOOK: Output: default@v1_n0 POSTHOOK: query: CREATE VIEW V1_n0 AS SELECT key, value from src POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@V1_n0 -POSTHOOK: Lineage: V1_n0.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: V1_n0.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@v1_n0 +POSTHOOK: Lineage: v1_n0.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: v1_n0.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select count(*) from V1_n0 where key > 0 PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -25,12 +25,12 @@ PREHOOK: query: CREATE TABLE M1 AS SELECT key, value from src PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@M1 +PREHOOK: Output: default@m1 POSTHOOK: query: CREATE TABLE M1 AS SELECT key, value from src POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@M1 +POSTHOOK: Output: default@m1 POSTHOOK: Lineage: m1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: m1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select count(*) from M1 where key > 0 @@ -46,12 +46,12 @@ PREHOOK: query: EXPLAIN CREATE TABLE M1 AS select * from src PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@M1 +PREHOOK: Output: default@m1 POSTHOOK: query: EXPLAIN CREATE TABLE M1 AS select * from src POSTHOOK: type: CREATETABLE_AS_SELECT 
POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@M1 +POSTHOOK: Output: default@m1 STAGE DEPENDENCIES: Stage-1 is a root stage Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5 @@ -81,7 +81,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.M1 + name: default.m1 Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: col1, col2 @@ -129,10 +129,10 @@ STAGE PLANS: Stage: Stage-8 Create Table columns: key string, value string - name: default.M1 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.m1 Stage: Stage-2 Stats Work @@ -140,7 +140,7 @@ STAGE PLANS: Column Stats Desc: Columns: key, value Column Types: string, string - Table: default.M1 + Table: default.m1 Stage: Stage-3 Map Reduce @@ -152,7 +152,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.M1 + name: default.m1 Stage: Stage-5 Map Reduce @@ -164,7 +164,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.M1 + name: default.m1 Stage: Stage-6 Move Operator @@ -176,12 +176,12 @@ PREHOOK: query: EXPLAIN CREATE TABLE M1 AS select * from M1 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@m1 PREHOOK: Output: database:default -PREHOOK: Output: default@M1 +PREHOOK: Output: default@m1 POSTHOOK: query: EXPLAIN CREATE TABLE M1 AS select * from M1 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@m1 POSTHOOK: Output: database:default -POSTHOOK: Output: default@M1 +POSTHOOK: Output: default@m1 STAGE DEPENDENCIES: Stage-1 is a root stage Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5 @@ -211,7 +211,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.M1 + name: default.m1 Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: col1, col2 @@ -259,10 +259,10 @@ STAGE PLANS: Stage: Stage-8 Create Table columns: key string, value string - name: default.M1 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.m1 Stage: Stage-2 Stats Work @@ -270,7 +270,7 @@ STAGE PLANS: Column Stats Desc: Columns: key, value Column Types: string, string - Table: default.M1 + Table: default.m1 Stage: Stage-3 Map Reduce @@ -282,7 +282,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.M1 + name: default.m1 Stage: Stage-5 Map Reduce @@ -294,7 +294,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.M1
+ name: default.m1
 Stage: Stage-6
 Move Operator
@@ -307,13 +307,13 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Input: default@v1_n0
 PREHOOK: Output: database:default
-PREHOOK: Output: default@M1
+PREHOOK: Output: default@m1
 POSTHOOK: query: EXPLAIN CREATE TABLE M1 AS select * from V1_n0
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@v1_n0
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@M1
+POSTHOOK: Output: default@m1
 STAGE DEPENDENCIES:
 Stage-1 is a root stage
 Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
@@ -345,7 +345,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.M1
+ name: default.m1
 Select Operator
 expressions: _col0 (type: string), _col1 (type: string)
 outputColumnNames: col1, col2
@@ -393,10 +393,10 @@ STAGE PLANS:
 Stage: Stage-8
 Create Table
 columns: key string, value string
- name: default.M1
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.m1
 Stage: Stage-2
 Stats Work
@@ -404,7 +404,7 @@ STAGE PLANS:
 Column Stats Desc:
 Columns: key, value
 Column Types: string, string
- Table: default.M1
+ Table: default.m1
 Stage: Stage-3
 Map Reduce
@@ -416,7 +416,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.M1
+ name: default.m1
 Stage: Stage-5
 Map Reduce
@@ -428,7 +428,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.M1
+ name: default.m1
 Stage: Stage-6
 Move Operator
@@ -440,12 +440,12 @@ PREHOOK: query: EXPLAIN CREATE TABLE V1_n0 AS select * from M1
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@m1
 PREHOOK: Output: database:default
-PREHOOK: Output: default@V1_n0
+PREHOOK: Output: default@v1_n0
 POSTHOOK: query: EXPLAIN CREATE TABLE V1_n0 AS select * from M1
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@m1
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V1_n0
+POSTHOOK: Output: default@v1_n0
 STAGE DEPENDENCIES:
 Stage-1 is a root stage
 Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
@@ -475,7 +475,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.V1_n0
+ name: default.v1_n0
 Select Operator
 expressions: _col0 (type: string), _col1 (type: string)
 outputColumnNames: col1, col2
@@ -523,10 +523,10 @@ STAGE PLANS:
 Stage: Stage-8
 Create Table
 columns: key string, value string
- name: default.V1_n0
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.v1_n0
 Stage: Stage-2
 Stats Work
@@ -534,7 +534,7 @@ STAGE PLANS:
 Column Stats Desc:
 Columns: key, value
 Column Types: string, string
- Table: default.V1_n0
+ Table: default.v1_n0
 Stage: Stage-3
 Map Reduce
@@ -546,7 +546,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.V1_n0
+ name: default.v1_n0
 Stage: Stage-5
 Map Reduce
@@ -558,7 +558,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.V1_n0
+ name: default.v1_n0
 Stage: Stage-6
 Move Operator
@@ -570,12 +570,12 @@ PREHOOK: query: EXPLAIN CREATE VIEW V1_n0 AS select * from M1
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@m1
 PREHOOK: Output: database:default
-PREHOOK: Output: default@V1_n0
+PREHOOK: Output: default@v1_n0
 POSTHOOK: query: EXPLAIN CREATE VIEW V1_n0 AS select * from M1
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@m1
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V1_n0
+POSTHOOK: Output: default@v1_n0
 STAGE DEPENDENCIES:
 Stage-1 is a root stage
@@ -584,17 +584,17 @@ STAGE PLANS:
 Create View
 columns: key string, value string
 expanded text: select `m1`.`key`, `m1`.`value` from `default`.`M1`
- name: default.V1_n0
+ name: hive.default.v1_n0
 original text: select * from M1
 PREHOOK: query: EXPLAIN CREATE TABLE M1 LIKE src
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@M1
+PREHOOK: Output: default@m1
 POSTHOOK: query: EXPLAIN CREATE TABLE M1 LIKE src
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@M1
+POSTHOOK: Output: default@m1
 STAGE DEPENDENCIES:
 Stage-0 is a root stage
@@ -605,18 +605,18 @@ STAGE PLANS:
 default output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 default serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 like: src
- name: default.M1
+ name: hive.default.m1
 table properties:
 bucketing_version 2
 PREHOOK: query: EXPLAIN CREATE TABLE M1 LIKE M1
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@M1
+PREHOOK: Output: default@m1
 POSTHOOK: query: EXPLAIN CREATE TABLE M1 LIKE M1
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@M1
+POSTHOOK: Output: default@m1
 STAGE DEPENDENCIES:
 Stage-0 is a root stage
@@ -626,8 +626,8 @@ STAGE PLANS:
 default input format: org.apache.hadoop.mapred.TextInputFormat
 default output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 default serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- like: M1
- name: default.M1
+ like: m1
+ name: hive.default.m1
 table properties:
 bucketing_version 2
@@ -645,7 +645,7 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
 Stage: Stage-0
 Drop Table
- table: M1
+ table: m1
 PREHOOK: query: select count(*) from M1 where key > 0
 PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/explain_logical.q.out ql/src/test/results/clientpositive/explain_logical.q.out
index 56c47d6025..f33f03eaf7 100644
--- ql/src/test/results/clientpositive/explain_logical.q.out
+++ ql/src/test/results/clientpositive/explain_logical.q.out
@@ -2,27 +2,27 @@ PREHOOK: query: CREATE VIEW V1_n8 AS SELECT key, value from src
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@V1_n8
+PREHOOK: Output: default@v1_n8
 POSTHOOK: query: CREATE VIEW V1_n8 AS SELECT key, value from src
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V1_n8
-POSTHOOK: Lineage: V1_n8.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V1_n8.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v1_n8
+POSTHOOK: Lineage: v1_n8.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v1_n8.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: CREATE VIEW V2_n3 AS SELECT ds, key, value FROM srcpart WHERE ds IS NOT NULL
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@srcpart
 PREHOOK: Output: database:default
-PREHOOK: Output: default@V2_n3
+PREHOOK: Output: default@v2_n3
 POSTHOOK: query: CREATE VIEW V2_n3 AS SELECT ds, key, value FROM srcpart WHERE ds IS NOT NULL
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V2_n3
-POSTHOOK: Lineage: V2_n3.ds SIMPLE [(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
-POSTHOOK: Lineage: V2_n3.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V2_n3.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v2_n3
+POSTHOOK: Lineage: v2_n3.ds SIMPLE [(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: v2_n3.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v2_n3.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: CREATE VIEW V3_n1 AS SELECT src1.key, src2.value FROM V2_n3 src1 JOIN src src2 ON src1.key = src2.key WHERE src1.ds IS NOT NULL
@@ -31,7 +31,7 @@ PREHOOK: Input: default@src
 PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@v2_n3
 PREHOOK: Output: database:default
-PREHOOK: Output: default@V3_n1
+PREHOOK: Output: default@v3_n1
 POSTHOOK: query: CREATE VIEW V3_n1 AS SELECT src1.key, src2.value FROM V2_n3 src1 JOIN src src2 ON src1.key = src2.key WHERE src1.ds IS NOT NULL
@@ -40,9 +40,9 @@ POSTHOOK: Input: default@src
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@v2_n3
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V3_n1
-POSTHOOK: Lineage: V3_n1.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V3_n1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v3_n1
+POSTHOOK: Lineage: v3_n1.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v3_n1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: CREATE VIEW V4_n1 AS SELECT src1.key, src2.value as value1, src3.value as value2 FROM V1_n8 src1 JOIN V2_n3 src2 on src1.key = src2.key JOIN src src3 ON src2.key = src3.key
@@ -52,7 +52,7 @@ PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@v1_n8
 PREHOOK: Input: default@v2_n3
 PREHOOK: Output: database:default
-PREHOOK: Output: default@V4_n1
+PREHOOK: Output: default@v4_n1
 POSTHOOK: query: CREATE VIEW V4_n1 AS SELECT src1.key, src2.value as value1, src3.value as value2 FROM V1_n8 src1 JOIN V2_n3 src2 on src1.key = src2.key JOIN src src3 ON src2.key = src3.key
@@ -62,10 +62,10 @@ POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@v1_n8
 POSTHOOK: Input: default@v2_n3
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V4_n1
-POSTHOOK: Lineage: V4_n1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V4_n1.value1 SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: V4_n1.value2 SIMPLE [(src)src3.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v4_n1
+POSTHOOK: Lineage: v4_n1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v4_n1.value1 SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: v4_n1.value2 SIMPLE [(src)src3.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: EXPLAIN LOGICAL SELECT key, count(1) FROM srcpart WHERE ds IS NOT NULL GROUP BY key
 PREHOOK: type: QUERY
@@ -558,16 +558,16 @@ PREHOOK: query: CREATE VIEW V5_n0 as SELECT * FROM srcpart where ds = '10'
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@srcpart
 PREHOOK: Output: database:default
-PREHOOK: Output: default@V5_n0
+PREHOOK: Output: default@v5_n0
 POSTHOOK: query: CREATE VIEW V5_n0 as SELECT * FROM srcpart where ds = '10'
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V5_n0
-POSTHOOK: Lineage: V5_n0.ds SIMPLE []
-POSTHOOK: Lineage: V5_n0.hr SIMPLE [(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
-POSTHOOK: Lineage: V5_n0.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V5_n0.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v5_n0
+POSTHOOK: Lineage: v5_n0.ds SIMPLE []
+POSTHOOK: Lineage: v5_n0.hr SIMPLE [(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: v5_n0.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v5_n0.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: EXPLAIN LOGICAL SELECT * FROM V5_n0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
diff --git ql/src/test/results/clientpositive/filter_cond_pushdown_HIVE_15647.q.out ql/src/test/results/clientpositive/filter_cond_pushdown_HIVE_15647.q.out
index 54f28df314..7698d04357 100644
--- ql/src/test/results/clientpositive/filter_cond_pushdown_HIVE_15647.q.out
+++ ql/src/test/results/clientpositive/filter_cond_pushdown_HIVE_15647.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE sales_HIVE_15647 (store_id INTEGER, store_number INTEGER, customer_id INTEGER)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@sales_HIVE_15647
+PREHOOK: Output: default@sales_hive_15647
 POSTHOOK: query: CREATE TABLE sales_HIVE_15647 (store_id INTEGER, store_number INTEGER, customer_id INTEGER)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@sales_HIVE_15647
+POSTHOOK: Output: default@sales_hive_15647
 PREHOOK: query: CREATE TABLE store_HIVE_15647 (store_id INTEGER, salad_bar BOOLEAN)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@store_HIVE_15647
+PREHOOK: Output: default@store_hive_15647
 POSTHOOK: query: CREATE TABLE store_HIVE_15647 (store_id INTEGER, salad_bar BOOLEAN)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@store_HIVE_15647
+POSTHOOK: Output: default@store_hive_15647
 PREHOOK: query: explain select count(*) from sales_HIVE_15647 as sales join store_HIVE_15647 as store on sales.store_id = store.store_id
diff --git ql/src/test/results/clientpositive/groupby10.q.out ql/src/test/results/clientpositive/groupby10.q.out
index 583f2fbc28..cd33bdba5c 100644
--- ql/src/test/results/clientpositive/groupby10.q.out
+++ ql/src/test/results/clientpositive/groupby10.q.out
@@ -17,11 +17,11 @@ POSTHOOK: Output: default@dest2
 PREHOOK: query: CREATE TABLE INPUT(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUT
+PREHOOK: Output: default@input
 POSTHOOK: query: CREATE TABLE INPUT(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUT
+POSTHOOK: Output: default@input
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE INPUT
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby13.q.out ql/src/test/results/clientpositive/groupby13.q.out
index a32a654afa..14a3dd57c2 100644
--- ql/src/test/results/clientpositive/groupby13.q.out
+++ ql/src/test/results/clientpositive/groupby13.q.out
@@ -94,11 +94,11 @@ STAGE PLANS:
 PREHOOK: query: create table aGBY (i int, j string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@aGBY
+PREHOOK: Output: default@agby
 POSTHOOK: query: create table aGBY (i int, j string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@aGBY
+POSTHOOK: Output: default@agby
 PREHOOK: query: insert into aGBY values ( 1, 'a'),(2,'b')
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
diff --git ql/src/test/results/clientpositive/groupby7_map.q.out ql/src/test/results/clientpositive/groupby7_map.q.out
index 08b0db51e1..e54d256141 100644
--- ql/src/test/results/clientpositive/groupby7_map.q.out
+++ ql/src/test/results/clientpositive/groupby7_map.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n82(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n82
+PREHOOK: Output: default@dest1_n82
 POSTHOOK: query: CREATE TABLE DEST1_n82(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n82
+POSTHOOK: Output: default@dest1_n82
 PREHOOK: query: CREATE TABLE DEST2_n19(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n19
+PREHOOK: Output: default@dest2_n19
 POSTHOOK: query: CREATE TABLE DEST2_n19(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n19
+POSTHOOK: Output: default@dest2_n19
 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST1_n82 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out
index e17ef4e8da..f58cbb9fca 100644
--- ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out
+++ ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n15(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n15
+PREHOOK: Output: default@dest1_n15
 POSTHOOK: query: CREATE TABLE DEST1_n15(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n15
+POSTHOOK: Output: default@dest1_n15
 PREHOOK: query: CREATE TABLE DEST2_n3(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n3
+PREHOOK: Output: default@dest2_n3
 POSTHOOK: query: CREATE TABLE DEST2_n3(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n3
+POSTHOOK: Output: default@dest2_n3
 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST1_n15 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby7_map_skew.q.out ql/src/test/results/clientpositive/groupby7_map_skew.q.out
index 4a06aa7830..a297d91a38 100644
--- ql/src/test/results/clientpositive/groupby7_map_skew.q.out
+++ ql/src/test/results/clientpositive/groupby7_map_skew.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n21(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n21
+PREHOOK: Output: default@dest1_n21
 POSTHOOK: query: CREATE TABLE DEST1_n21(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n21
+POSTHOOK: Output: default@dest1_n21
 PREHOOK: query: CREATE TABLE DEST2_n5(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n5
+PREHOOK: Output: default@dest2_n5
 POSTHOOK: query: CREATE TABLE DEST2_n5(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n5
+POSTHOOK: Output: default@dest2_n5
 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST1_n21 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby7_noskew.q.out ql/src/test/results/clientpositive/groupby7_noskew.q.out
index d60d67e0a8..1aea59622a 100644
--- ql/src/test/results/clientpositive/groupby7_noskew.q.out
+++ ql/src/test/results/clientpositive/groupby7_noskew.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n101(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n101
+PREHOOK: Output: default@dest1_n101
 POSTHOOK: query: CREATE TABLE DEST1_n101(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n101
+POSTHOOK: Output: default@dest1_n101
 PREHOOK: query: CREATE TABLE DEST2_n28(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n28
+PREHOOK: Output: default@dest2_n28
 POSTHOOK: query: CREATE TABLE DEST2_n28(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n28
+POSTHOOK: Output: default@dest2_n28
 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST1_n101 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby7_noskew_multi_single_reducer.q.out ql/src/test/results/clientpositive/groupby7_noskew_multi_single_reducer.q.out
index 45a2afcebc..6ffff4469b 100644
--- ql/src/test/results/clientpositive/groupby7_noskew_multi_single_reducer.q.out
+++ ql/src/test/results/clientpositive/groupby7_noskew_multi_single_reducer.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n170(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n170
+PREHOOK: Output: default@dest1_n170
 POSTHOOK: query: CREATE TABLE DEST1_n170(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n170
+POSTHOOK: Output: default@dest1_n170
 PREHOOK: query: CREATE TABLE DEST2_n42(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n42
+PREHOOK: Output: default@dest2_n42
 POSTHOOK: query: CREATE TABLE DEST2_n42(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n42
+POSTHOOK: Output: default@dest2_n42
 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST1_n170 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key ORDER BY SRC.key limit 10
diff --git ql/src/test/results/clientpositive/groupby8.q.out ql/src/test/results/clientpositive/groupby8.q.out
index 59a2334014..302588d70d 100644
--- ql/src/test/results/clientpositive/groupby8.q.out
+++ ql/src/test/results/clientpositive/groupby8.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n71(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n71
+PREHOOK: Output: default@dest1_n71
 POSTHOOK: query: CREATE TABLE DEST1_n71(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n71
+POSTHOOK: Output: default@dest1_n71
 PREHOOK: query: CREATE TABLE DEST2_n15(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n15
+PREHOOK: Output: default@dest2_n15
 POSTHOOK: query: CREATE TABLE DEST2_n15(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n15
+POSTHOOK: Output: default@dest2_n15
 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST1_n71 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby8_map.q.out ql/src/test/results/clientpositive/groupby8_map.q.out
index c004bcaf13..62f9247a74 100644
--- ql/src/test/results/clientpositive/groupby8_map.q.out
+++ ql/src/test/results/clientpositive/groupby8_map.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n136(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n136
+PREHOOK: Output: default@dest1_n136
 POSTHOOK: query: CREATE TABLE DEST1_n136(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n136
+POSTHOOK: Output: default@dest1_n136
 PREHOOK: query: CREATE TABLE DEST2_n35(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n35
+PREHOOK: Output: default@dest2_n35
 POSTHOOK: query: CREATE TABLE DEST2_n35(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n35
+POSTHOOK: Output: default@dest2_n35
 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST1_n136 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby8_map_skew.q.out ql/src/test/results/clientpositive/groupby8_map_skew.q.out
index 6ae2b31edd..8c674f4ee5 100644
--- ql/src/test/results/clientpositive/groupby8_map_skew.q.out
+++ ql/src/test/results/clientpositive/groupby8_map_skew.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n87(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n87
+PREHOOK: Output: default@dest1_n87
 POSTHOOK: query: CREATE TABLE DEST1_n87(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n87
+POSTHOOK: Output: default@dest1_n87
 PREHOOK: query: CREATE TABLE DEST2_n22(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n22
+PREHOOK: Output: default@dest2_n22
 POSTHOOK: query: CREATE TABLE DEST2_n22(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n22
+POSTHOOK: Output: default@dest2_n22
 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST1_n87 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby8_noskew.q.out ql/src/test/results/clientpositive/groupby8_noskew.q.out
index fdc5a60438..3071bacfc4 100644
--- ql/src/test/results/clientpositive/groupby8_noskew.q.out
+++ ql/src/test/results/clientpositive/groupby8_noskew.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n48(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n48
+PREHOOK: Output: default@dest1_n48
 POSTHOOK: query: CREATE TABLE DEST1_n48(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n48
+POSTHOOK: Output: default@dest1_n48
 PREHOOK: query: CREATE TABLE DEST2_n9(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n9
+PREHOOK: Output: default@dest2_n9
 POSTHOOK: query: CREATE TABLE DEST2_n9(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n9
+POSTHOOK: Output: default@dest2_n9
 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST1_n48 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby9.q.out ql/src/test/results/clientpositive/groupby9.q.out
index 8eaa2e9d1f..54d7cbed8d 100644
--- ql/src/test/results/clientpositive/groupby9.q.out
+++ ql/src/test/results/clientpositive/groupby9.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n117(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n117
+PREHOOK: Output: default@dest1_n117
 POSTHOOK: query: CREATE TABLE DEST1_n117(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n117
+POSTHOOK: Output: default@dest1_n117
 PREHOOK: query: CREATE TABLE DEST2_n31(key INT, val1 STRING, val2 STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n31
+PREHOOK: Output: default@dest2_n31
 POSTHOOK: query: CREATE TABLE DEST2_n31(key INT, val1 STRING, val2 STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n31
+POSTHOOK: Output: default@dest2_n31
 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST1_n117 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby_complex_types.q.out ql/src/test/results/clientpositive/groupby_complex_types.q.out
index e784a5e04a..d3cb3400f6 100644
--- ql/src/test/results/clientpositive/groupby_complex_types.q.out
+++ ql/src/test/results/clientpositive/groupby_complex_types.q.out
@@ -1,27 +1,27 @@
 PREHOOK: query: CREATE TABLE DEST1_n163(key ARRAY, value BIGINT) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n163
+PREHOOK: Output: default@dest1_n163
 POSTHOOK: query: CREATE TABLE DEST1_n163(key ARRAY, value BIGINT) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n163
+POSTHOOK: Output: default@dest1_n163
 PREHOOK: query: CREATE TABLE DEST2_n41(key MAP, value BIGINT) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n41
+PREHOOK: Output: default@dest2_n41
 POSTHOOK: query: CREATE TABLE DEST2_n41(key MAP, value BIGINT) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n41
+POSTHOOK: Output: default@dest2_n41
 PREHOOK: query: CREATE TABLE DEST3_n7(key STRUCT, value BIGINT) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST3_n7
+PREHOOK: Output: default@dest3_n7
 POSTHOOK: query: CREATE TABLE DEST3_n7(key STRUCT, value BIGINT) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST3_n7
+POSTHOOK: Output: default@dest3_n7
 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST1_n163 SELECT ARRAY(SRC.key), COUNT(1) GROUP BY ARRAY(SRC.key)
diff --git ql/src/test/results/clientpositive/groupby_complex_types_multi_single_reducer.q.out ql/src/test/results/clientpositive/groupby_complex_types_multi_single_reducer.q.out
index dd2ea4a357..8053e4ad2c 100644
--- ql/src/test/results/clientpositive/groupby_complex_types_multi_single_reducer.q.out
+++ ql/src/test/results/clientpositive/groupby_complex_types_multi_single_reducer.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n47(key ARRAY, value BIGINT) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n47
+PREHOOK: Output: default@dest1_n47
 POSTHOOK: query: CREATE TABLE DEST1_n47(key ARRAY, value BIGINT) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n47
+POSTHOOK: Output: default@dest1_n47
 PREHOOK: query: CREATE TABLE DEST2_n8(key MAP, value BIGINT) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n8
+PREHOOK: Output: default@dest2_n8
 POSTHOOK: query: CREATE TABLE DEST2_n8(key MAP, value BIGINT) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n8
+POSTHOOK: Output: default@dest2_n8
 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST1_n47 SELECT ARRAY(SRC.key) as keyarray, COUNT(1) GROUP BY ARRAY(SRC.key) ORDER BY keyarray limit 10
diff --git ql/src/test/results/clientpositive/groupby_cube1.q.out ql/src/test/results/clientpositive/groupby_cube1.q.out
index 0ac1490e34..f47319e323 100644
--- ql/src/test/results/clientpositive/groupby_cube1.q.out
+++ ql/src/test/results/clientpositive/groupby_cube1.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n82(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n82
+PREHOOK: Output: default@t1_n82
 POSTHOOK: query: CREATE TABLE T1_n82(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n82
+POSTHOOK: Output: default@t1_n82
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n82
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -562,19 +562,19 @@ NULL 6
 PREHOOK: query: CREATE TABLE T2_n51(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n51
+PREHOOK: Output: default@t2_n51
 POSTHOOK: query: CREATE TABLE T2_n51(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n51
+POSTHOOK: Output: default@t2_n51
 PREHOOK: query: CREATE TABLE T3_n16(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n16
+PREHOOK: Output: default@t3_n16
 POSTHOOK: query: CREATE TABLE T3_n16(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n16
+POSTHOOK: Output: default@t3_n16
 PREHOOK: query: EXPLAIN FROM T1_n82 INSERT OVERWRITE TABLE T2_n51 SELECT key, val, count(1) group by key, val with cube
diff --git ql/src/test/results/clientpositive/groupby_duplicate_key.q.out ql/src/test/results/clientpositive/groupby_duplicate_key.q.out
index 44e8ef6952..ea5796ad67 100644
--- ql/src/test/results/clientpositive/groupby_duplicate_key.q.out
+++ ql/src/test/results/clientpositive/groupby_duplicate_key.q.out
@@ -169,10 +169,10 @@ STAGE PLANS:
 Stage: Stage-4
 Create Table
 columns: key string, dummy1 string, dummy2 string
- name: default.dummy_n6
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.dummy_n6
 Stage: Stage-2
 Stats Work
diff --git ql/src/test/results/clientpositive/groupby_grouping_id3.q.out ql/src/test/results/clientpositive/groupby_grouping_id3.q.out
index cdc063b370..aa122cd4f4 100644
--- ql/src/test/results/clientpositive/groupby_grouping_id3.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_id3.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n86(key INT, value INT) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n86
+PREHOOK: Output: default@t1_n86
 POSTHOOK: query: CREATE TABLE T1_n86(key INT, value INT) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n86
+POSTHOOK: Output: default@t1_n86
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_groupingid.txt' INTO TABLE T1_n86
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets1.q.out ql/src/test/results/clientpositive/groupby_grouping_sets1.q.out
index 43ab99b9f1..4f21e790d0 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets1.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets1.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n41(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n41
+PREHOOK: Output: default@t1_n41
 POSTHOOK: query: CREATE TABLE T1_n41(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n41
+POSTHOOK: Output: default@t1_n41
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_n41
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out
index 7831a49e95..1066792a64 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n81(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n81
+PREHOOK: Output: default@t1_n81
 POSTHOOK: query: CREATE TABLE T1_n81(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n81
+POSTHOOK: Output: default@t1_n81
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_n81
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -341,11 +341,11 @@ NULL NULL 23.0
 PREHOOK: query: CREATE TABLE T2_n50(a STRING, b STRING, c int, d int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n50
+PREHOOK: Output: default@t2_n50
 POSTHOOK: query: CREATE TABLE T2_n50(a STRING, b STRING, c int, d int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n50
+POSTHOOK: Output: default@t2_n50
 PREHOOK: query: INSERT OVERWRITE TABLE T2_n50 SELECT a, b, c, c from T1_n81
 PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out
index a08dd02490..e29ab4f91a 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n118(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n118
+PREHOOK: Output: default@t1_n118
 POSTHOOK: query: CREATE TABLE T1_n118(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n118
+POSTHOOK: Output: default@t1_n118
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets1.txt' INTO TABLE T1_n118
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out
index b61aba926d..9ca2fb196f 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n143(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n143
+PREHOOK: Output: default@t1_n143
 POSTHOOK: query: CREATE TABLE T1_n143(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n143
+POSTHOOK: Output: default@t1_n143
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_n143
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out
index b6b4dcb339..6c57bd2d3f 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n24(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n24
+PREHOOK: Output: default@t1_n24
 POSTHOOK: query: CREATE TABLE T1_n24(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n24
+POSTHOOK: Output: default@t1_n24
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_n24
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets6.q.out ql/src/test/results/clientpositive/groupby_grouping_sets6.q.out
index f6571b4645..4926414194 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets6.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets6.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n75(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n75
+PREHOOK: Output: default@t1_n75
 POSTHOOK: query: CREATE TABLE T1_n75(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n75
+POSTHOOK: Output: default@t1_n75
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_n75
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets_grouping.q.out ql/src/test/results/clientpositive/groupby_grouping_sets_grouping.q.out
index 93e081b729..5179ec8125 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets_grouping.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets_grouping.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n64(key INT, value INT) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n64
+PREHOOK: Output: default@t1_n64
 POSTHOOK: query: CREATE TABLE T1_n64(key INT, value INT) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n64
+POSTHOOK: Output: default@t1_n64
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_groupingid.txt' INTO TABLE T1_n64
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets_limit.q.out ql/src/test/results/clientpositive/groupby_grouping_sets_limit.q.out
index b4aa6d1dd0..0c99a03a77 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets_limit.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets_limit.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n141(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n141
+PREHOOK: Output: default@t1_n141
 POSTHOOK: query: CREATE TABLE T1_n141(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n141
+POSTHOOK: Output: default@t1_n141
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_n141
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_nocolumnalign.q.out ql/src/test/results/clientpositive/groupby_nocolumnalign.q.out
index 3a92e71a75..c6327786a3 100644
--- ql/src/test/results/clientpositive/groupby_nocolumnalign.q.out
+++ ql/src/test/results/clientpositive/groupby_nocolumnalign.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE DEST2_n31_2(key INT, val1 STRING, val2 STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n31_2
+PREHOOK: Output: default@dest2_n31_2
 POSTHOOK: query: CREATE TABLE DEST2_n31_2(key INT, val1 STRING, val2 STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n31_2
+POSTHOOK: Output: default@dest2_n31_2
 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST2_n31_2 SELECT SRC.key, SRC.value, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key, SRC.value
diff --git ql/src/test/results/clientpositive/groupby_position.q.out ql/src/test/results/clientpositive/groupby_position.q.out
index 17f02c9089..a1e251a9a5 100644
--- ql/src/test/results/clientpositive/groupby_position.q.out
+++ ql/src/test/results/clientpositive/groupby_position.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE testTable1(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@testTable1
+PREHOOK: Output: default@testtable1
 POSTHOOK: query: CREATE TABLE testTable1(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testTable1
+POSTHOOK: Output: default@testtable1
 PREHOOK: query: CREATE TABLE testTable2(key INT, val1 STRING, val2 STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@testTable2
+PREHOOK: Output: default@testtable2
 POSTHOOK: query: CREATE TABLE testTable2(key INT, val1 STRING, val2 STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testTable2
+POSTHOOK: Output: default@testtable2
 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE testTable1 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) WHERE SRC.key < 20 GROUP BY 1
diff --git ql/src/test/results/clientpositive/groupby_rollup1.q.out ql/src/test/results/clientpositive/groupby_rollup1.q.out
index e7b61b4a33..efc0533799 100644
--- ql/src/test/results/clientpositive/groupby_rollup1.q.out
+++ ql/src/test/results/clientpositive/groupby_rollup1.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n91(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n91
+PREHOOK: Output: default@t1_n91
 POSTHOOK: query: CREATE TABLE T1_n91(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n91
+POSTHOOK: Output: default@t1_n91
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n91
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -395,19 +395,19 @@ NULL 6
 PREHOOK: query: CREATE TABLE T2_n56(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n56
+PREHOOK: Output: default@t2_n56
 POSTHOOK: query: CREATE TABLE T2_n56(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n56
+POSTHOOK: Output: default@t2_n56
 PREHOOK: query: CREATE TABLE T3_n20(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n20
+PREHOOK: Output: default@t3_n20
 POSTHOOK: query: CREATE TABLE T3_n20(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n20
+POSTHOOK: Output: default@t3_n20
 PREHOOK: query: EXPLAIN FROM T1_n91 INSERT OVERWRITE TABLE T2_n56 SELECT key, val, count(1) group by key, val with rollup
diff --git ql/src/test/results/clientpositive/groupby_sort_10.q.out ql/src/test/results/clientpositive/groupby_sort_10.q.out
index 570d3eeeaf..227238c7f0 100644
--- ql/src/test/results/clientpositive/groupby_sort_10.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_10.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n149(key STRING, val STRING) PARTITIONED BY (ds
 CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n149
+PREHOOK: Output: default@t1_n149
 POSTHOOK: query: CREATE TABLE T1_n149(key STRING, val STRING) PARTITIONED BY (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n149
+POSTHOOK: Output: default@t1_n149
 PREHOOK: query: INSERT OVERWRITE TABLE T1_n149 PARTITION (ds='1') SELECT * from src where key = 0 or key = 11
 PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/groupby_sort_11.q.out ql/src/test/results/clientpositive/groupby_sort_11.q.out
index 76d3c7c51a..fa7b149bed 100644
--- ql/src/test/results/clientpositive/groupby_sort_11.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_11.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n18(key STRING, val STRING) PARTITIONED BY (ds s
 CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n18
+PREHOOK: Output: default@t1_n18
 POSTHOOK: query: CREATE TABLE T1_n18(key STRING, val STRING) PARTITIONED BY (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n18
+POSTHOOK: Output: default@t1_n18
 PREHOOK: query: INSERT OVERWRITE TABLE T1_n18 PARTITION (ds='1') SELECT * from src where key < 10
 PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/groupby_sort_1_23.q.out ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
index 6498e2422d..3643232133 100644
--- ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n80(key STRING, val STRING)
 CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n80
+PREHOOK: Output: default@t1_n80
 POSTHOOK: query: CREATE TABLE T1_n80(key STRING, val STRING) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n80
+POSTHOOK: Output: default@t1_n80
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n80
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -29,11 +29,11 @@ POSTHOOK: Lineage: t1_n80.val SIMPLE [(t1_n80)t1_n80.FieldSchema(name:val, type:
 PREHOOK: query: CREATE TABLE outputTbl1_n18(key int, cnt int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n18
+PREHOOK: Output: default@outputtbl1_n18
 POSTHOOK: query: CREATE TABLE outputTbl1_n18(key int, cnt int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n18
+POSTHOOK: Output: default@outputtbl1_n18
 PREHOOK: query: EXPLAIN EXTENDED INSERT OVERWRITE TABLE outputTbl1_n18 SELECT key, count(1) FROM T1_n80 GROUP BY key
@@ -480,11 +480,11 @@ POSTHOOK: Input: default@outputtbl1_n18
 PREHOOK: query: CREATE TABLE outputTbl2_n5(key1 int, key2 string, cnt int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl2_n5
+PREHOOK: Output: default@outputtbl2_n5
 POSTHOOK: query: CREATE TABLE outputTbl2_n5(key1 int, key2 string, cnt int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl2_n5
+POSTHOOK: Output: default@outputtbl2_n5
 PREHOOK: query: EXPLAIN EXTENDED INSERT OVERWRITE TABLE outputTbl2_n5 SELECT key, val, count(1) FROM T1_n80 GROUP BY key, val
@@ -1691,11 +1691,11 @@ POSTHOOK: Input: default@outputtbl1_n18
 PREHOOK: query: CREATE TABLE outputTbl3_n2(key1 int, key2 int, cnt int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl3_n2
+PREHOOK: Output: default@outputtbl3_n2
 POSTHOOK: query: CREATE TABLE outputTbl3_n2(key1 int, key2 int, cnt int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl3_n2
+POSTHOOK: Output: default@outputtbl3_n2
 PREHOOK: query: EXPLAIN EXTENDED INSERT OVERWRITE TABLE outputTbl3_n2 SELECT 1, key, count(1) FROM T1_n80 GROUP BY 1, key
@@ -2143,11 +2143,11 @@ POSTHOOK: Input: default@outputtbl3_n2
 PREHOOK: query: CREATE TABLE outputTbl4_n2(key1 int, key2 int, key3 string, cnt int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl4_n2
+PREHOOK: Output: default@outputtbl4_n2
 POSTHOOK: query: CREATE TABLE outputTbl4_n2(key1 int, key2 int, key3 string, cnt int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl4_n2
+POSTHOOK: Output: default@outputtbl4_n2
 PREHOOK: query: EXPLAIN EXTENDED INSERT OVERWRITE TABLE outputTbl4_n2 SELECT key, 1, val, count(1) FROM T1_n80 GROUP BY key, 1, val
@@ -4992,12 +4992,12 @@ PREHOOK: query: CREATE TABLE T2_n49(key STRING, val STRING)
 CLUSTERED BY (key, val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n49
+PREHOOK: Output: default@t2_n49
 POSTHOOK: query: CREATE TABLE T2_n49(key STRING, val STRING) CLUSTERED BY (key, val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n49
+POSTHOOK: Output: default@t2_n49
 PREHOOK: query: INSERT OVERWRITE TABLE T2_n49 select key, val from T1_n80
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1_n80
@@ -5773,11 +5773,11 @@ POSTHOOK: Input: default@outputtbl4_n2
 PREHOOK: query: CREATE TABLE outputTbl5_n2(key1 int, key2 int, key3 string, key4 int, cnt int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl5_n2
+PREHOOK: Output: default@outputtbl5_n2
 POSTHOOK: query: CREATE TABLE outputTbl5_n2(key1 int, key2 int, key3 string, key4 int, cnt int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl5_n2
+POSTHOOK: Output: default@outputtbl5_n2
 PREHOOK: query: EXPLAIN EXTENDED INSERT OVERWRITE TABLE outputTbl5_n2 SELECT key, 1, val, 2, count(1) FROM T2_n49 GROUP BY key, 1, val, 2
@@ -7150,19 +7150,19 @@ POSTHOOK: Input: default@outputtbl4_n2
 PREHOOK: query: CREATE TABLE DEST1_n80(key INT, cnt INT)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n80
+PREHOOK: Output: default@dest1_n80
 POSTHOOK: query: CREATE TABLE DEST1_n80(key INT, cnt INT)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n80
+POSTHOOK: Output: default@dest1_n80
 PREHOOK: query: CREATE TABLE DEST2_n18(key INT, val STRING, cnt INT)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n18
+PREHOOK: Output: default@dest2_n18
 POSTHOOK: query: CREATE TABLE DEST2_n18(key INT, val STRING, cnt INT)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n18
+POSTHOOK: Output: default@dest2_n18
 PREHOOK: query: EXPLAIN FROM T2_n49 INSERT OVERWRITE TABLE DEST1_n80 SELECT key, count(1) GROUP BY key
diff --git ql/src/test/results/clientpositive/groupby_sort_2.q.out ql/src/test/results/clientpositive/groupby_sort_2.q.out
index a6b2403f47..1c1eef927e 100644
--- ql/src/test/results/clientpositive/groupby_sort_2.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_2.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n51(key STRING, val STRING)
 CLUSTERED BY (key) SORTED BY (val) INTO 2 BUCKETS STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n51
+PREHOOK: Output: default@t1_n51
 POSTHOOK: query: CREATE TABLE T1_n51(key STRING, val STRING) CLUSTERED BY (key) SORTED BY (val) INTO 2 BUCKETS STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n51
+POSTHOOK: Output: default@t1_n51
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n51
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -29,11 +29,11 @@ POSTHOOK: Lineage: t1_n51.val SIMPLE [(t1_n51)t1_n51.FieldSchema(name:val, type:
 PREHOOK: query: CREATE TABLE outputTbl1_n10(val string, cnt int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n10
+PREHOOK: Output: default@outputtbl1_n10
 POSTHOOK: query: CREATE TABLE outputTbl1_n10(val string, cnt int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n10
+POSTHOOK: Output: default@outputtbl1_n10
 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl1_n10 SELECT val, count(1) FROM T1_n51 GROUP BY val
diff --git ql/src/test/results/clientpositive/groupby_sort_3.q.out ql/src/test/results/clientpositive/groupby_sort_3.q.out
index e657a28396..e979e85300 100644
--- ql/src/test/results/clientpositive/groupby_sort_3.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_3.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n89(key STRING, val STRING)
 CLUSTERED BY (key) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n89
+PREHOOK: Output: default@t1_n89
 POSTHOOK: query: CREATE TABLE T1_n89(key STRING, val STRING) CLUSTERED BY (key) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n89
+POSTHOOK: Output: default@t1_n89
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n89
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -29,11 +29,11 @@ POSTHOOK: Lineage: t1_n89.val SIMPLE [(t1_n89)t1_n89.FieldSchema(name:val, type:
 PREHOOK: query: CREATE TABLE outputTbl1_n20(key string, val string, cnt int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n20
+PREHOOK: Output: default@outputtbl1_n20
 POSTHOOK: query: CREATE TABLE outputTbl1_n20(key string, val string, cnt int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n20
+POSTHOOK: Output: default@outputtbl1_n20
 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl1_n20 SELECT key, val, count(1) FROM T1_n89 GROUP BY key, val
@@ -201,11 +201,11 @@ POSTHOOK: Input: default@outputtbl1_n20
 PREHOOK: query: CREATE TABLE outputTbl2_n7(key string, cnt int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl2_n7
+PREHOOK: Output: default@outputtbl2_n7
 POSTHOOK: query: CREATE TABLE outputTbl2_n7(key string, cnt int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl2_n7
+POSTHOOK: Output: default@outputtbl2_n7
 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl2_n7 SELECT key, count(1) FROM T1_n89 GROUP BY key
diff --git ql/src/test/results/clientpositive/groupby_sort_4.q.out ql/src/test/results/clientpositive/groupby_sort_4.q.out
index cadc717f68..3dc93ee96a 100644
--- ql/src/test/results/clientpositive/groupby_sort_4.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_4.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n133(key STRING, val STRING)
 CLUSTERED BY (key, val) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n133
+PREHOOK: Output: default@t1_n133
 POSTHOOK: query: CREATE TABLE T1_n133(key STRING, val STRING) CLUSTERED BY (key, val) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n133
+POSTHOOK: Output: default@t1_n133
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n133
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -29,11 +29,11 @@ POSTHOOK: Lineage: t1_n133.val SIMPLE [(t1_n133)t1_n133.FieldSchema(name:val, ty
 PREHOOK: query: CREATE TABLE outputTbl1_n31(key STRING, cnt INT)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n31
+PREHOOK: Output: default@outputtbl1_n31
 POSTHOOK: query: CREATE TABLE outputTbl1_n31(key STRING, cnt INT)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n31
+POSTHOOK: Output: default@outputtbl1_n31
 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl1_n31 SELECT key, count(1) FROM T1_n133 GROUP BY key
@@ -185,11 +185,11 @@ POSTHOOK: Input: default@outputtbl1_n31
 PREHOOK: query: CREATE TABLE outputTbl2_n8(key STRING, val STRING, cnt INT)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl2_n8
+PREHOOK: Output: default@outputtbl2_n8
 POSTHOOK: query: CREATE TABLE outputTbl2_n8(key STRING, val STRING, cnt INT)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl2_n8
+POSTHOOK: Output: default@outputtbl2_n8
 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl2_n8 SELECT key, val, count(1) FROM T1_n133 GROUP BY key, val
diff --git ql/src/test/results/clientpositive/groupby_sort_5.q.out ql/src/test/results/clientpositive/groupby_sort_5.q.out
index 90312062f9..215c47fefb 100644
--- ql/src/test/results/clientpositive/groupby_sort_5.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_5.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n6(key STRING, val STRING)
 CLUSTERED BY (val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n6
+PREHOOK: Output: default@t1_n6
 POSTHOOK: query: CREATE TABLE T1_n6(key STRING, val STRING) CLUSTERED BY (val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n6
+POSTHOOK: Output: default@t1_n6
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n6
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -29,11 +29,11 @@ POSTHOOK: Lineage: t1_n6.val SIMPLE [(t1_n6)t1_n6.FieldSchema(name:val, type:str
 PREHOOK: query: CREATE TABLE outputTbl1_n5(key STRING, val STRING, cnt INT)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n5
+PREHOOK: Output: default@outputtbl1_n5
 POSTHOOK: query: CREATE TABLE outputTbl1_n5(key STRING, val STRING, cnt INT)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n5
+POSTHOOK: Output: default@outputtbl1_n5
 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl1_n5 SELECT key, val, count(1) FROM T1_n6 GROUP BY key, val
@@ -210,12 +210,12 @@ PREHOOK: query: CREATE TABLE T1_n6(key STRING, val STRING)
 CLUSTERED BY (val, key) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n6
+PREHOOK: Output: default@t1_n6
 POSTHOOK: query: CREATE TABLE T1_n6(key STRING, val STRING) CLUSTERED BY (val, key) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n6
+POSTHOOK: Output: default@t1_n6
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n6
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -410,12 +410,12 @@ PREHOOK: query: CREATE TABLE T1_n6(key STRING, val STRING)
 CLUSTERED BY (val) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n6
+PREHOOK: Output: default@t1_n6
 POSTHOOK: query: CREATE TABLE T1_n6(key STRING, val STRING) CLUSTERED BY (val) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n6
+POSTHOOK: Output: default@t1_n6
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n6
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -437,11 +437,11 @@ POSTHOOK: Lineage: t1_n6.val SIMPLE [(t1_n6)t1_n6.FieldSchema(name:val, type:str
 PREHOOK: query: CREATE TABLE outputTbl2_n1(key STRING, cnt INT)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl2_n1
+PREHOOK: Output: default@outputtbl2_n1
 POSTHOOK: query: CREATE TABLE outputTbl2_n1(key STRING, cnt INT)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl2_n1
+POSTHOOK: Output: default@outputtbl2_n1
 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl2_n1 SELECT key, count(1) FROM T1_n6 GROUP BY key
diff --git ql/src/test/results/clientpositive/groupby_sort_6.q.out ql/src/test/results/clientpositive/groupby_sort_6.q.out
index 69306412a7..1aaa676b8c 100644
--- ql/src/test/results/clientpositive/groupby_sort_6.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_6.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE T1_n61(key STRING, val STRING) PARTITIONED BY (ds string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n61
+PREHOOK: Output: default@t1_n61
 POSTHOOK: query: CREATE TABLE T1_n61(key STRING, val STRING) PARTITIONED BY (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n61
+POSTHOOK: Output: default@t1_n61
 PREHOOK: query: CREATE TABLE outputTbl1_n15(key int, cnt int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n15
+PREHOOK: Output: default@outputtbl1_n15
 POSTHOOK: query: CREATE TABLE outputTbl1_n15(key int, cnt int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n15
+POSTHOOK: Output: default@outputtbl1_n15
 PREHOOK: query: EXPLAIN EXTENDED INSERT OVERWRITE TABLE outputTbl1_n15 SELECT key, count(1) FROM T1_n61 where ds = '1' GROUP BY key
diff --git ql/src/test/results/clientpositive/groupby_sort_7.q.out ql/src/test/results/clientpositive/groupby_sort_7.q.out
index a0a193d720..df62e7a11d 100644
--- ql/src/test/results/clientpositive/groupby_sort_7.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_7.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n104(key STRING, val STRING) PARTITIONED BY (ds
 CLUSTERED BY (val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n104
+PREHOOK: Output: default@t1_n104
 POSTHOOK: query: CREATE TABLE T1_n104(key STRING, val STRING) PARTITIONED BY (ds string) CLUSTERED BY (val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n104
+POSTHOOK: Output: default@t1_n104
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n104 PARTITION (ds='1')
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -32,11 +32,11 @@ POSTHOOK: Lineage: t1_n104 PARTITION(ds=1).val SIMPLE [(t1_n104)t1_n104.FieldSch
 PREHOOK: query: CREATE TABLE outputTbl1_n26(key STRING, val STRING, cnt INT)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n26
+PREHOOK: Output: default@outputtbl1_n26
 POSTHOOK: query: CREATE TABLE outputTbl1_n26(key STRING, val STRING, cnt INT)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n26
+POSTHOOK: Output: default@outputtbl1_n26
 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl1_n26 SELECT key, val, count(1) FROM T1_n104 where ds = '1' GROUP BY key, val
diff --git ql/src/test/results/clientpositive/groupby_sort_8.q.out ql/src/test/results/clientpositive/groupby_sort_8.q.out
index b5f581e6e6..983693204f 100644
--- ql/src/test/results/clientpositive/groupby_sort_8.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_8.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n45(key STRING, val STRING) PARTITIONED BY (ds s
 CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n45
+PREHOOK: Output: default@t1_n45
 POSTHOOK: query: CREATE TABLE T1_n45(key STRING, val STRING) PARTITIONED BY (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n45
+POSTHOOK: Output: default@t1_n45
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n45 PARTITION (ds='1')
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_sort_9.q.out ql/src/test/results/clientpositive/groupby_sort_9.q.out
index 33e21a3e08..f20d616d4e 100644
--- ql/src/test/results/clientpositive/groupby_sort_9.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_9.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n96(key STRING, val STRING) PARTITIONED BY (ds s
 CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n96
+PREHOOK: Output: default@t1_n96
 POSTHOOK: query: CREATE TABLE T1_n96(key STRING, val STRING) PARTITIONED BY (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n96
+POSTHOOK: Output:
default@t1_n96 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n96 PARTITION (ds='1') PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out index 38826ef32b..cd094468de 100644 --- ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out +++ ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n56(key STRING, val STRING) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n56 +PREHOOK: Output: default@t1_n56 POSTHOOK: query: CREATE TABLE T1_n56(key STRING, val STRING) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n56 +POSTHOOK: Output: default@t1_n56 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n56 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -29,11 +29,11 @@ POSTHOOK: Lineage: t1_n56.val SIMPLE [(t1_n56)t1_n56.FieldSchema(name:val, type: PREHOOK: query: CREATE TABLE outputTbl1_n13(key int, cnt int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n13 +PREHOOK: Output: default@outputtbl1_n13 POSTHOOK: query: CREATE TABLE outputTbl1_n13(key int, cnt int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n13 +POSTHOOK: Output: default@outputtbl1_n13 PREHOOK: query: EXPLAIN EXTENDED INSERT OVERWRITE TABLE outputTbl1_n13 SELECT key, count(1) FROM T1_n56 GROUP BY key @@ -480,11 +480,11 @@ POSTHOOK: Input: default@outputtbl1_n13 PREHOOK: query: CREATE TABLE outputTbl2_n3(key1 int, key2 string, cnt int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl2_n3 +PREHOOK: Output: default@outputtbl2_n3 POSTHOOK: query: CREATE TABLE outputTbl2_n3(key1 int, key2 string, cnt int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl2_n3 +POSTHOOK: Output: default@outputtbl2_n3 PREHOOK: query: EXPLAIN EXTENDED INSERT OVERWRITE TABLE outputTbl2_n3 SELECT key, val, count(1) FROM T1_n56 GROUP BY key, val @@ -1761,11 +1761,11 @@ POSTHOOK: Input: default@outputtbl1_n13 PREHOOK: query: CREATE TABLE outputTbl3_n1(key1 int, key2 int, cnt int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl3_n1 +PREHOOK: Output: default@outputtbl3_n1 POSTHOOK: query: CREATE TABLE outputTbl3_n1(key1 int, key2 int, cnt int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl3_n1 +POSTHOOK: Output: default@outputtbl3_n1 PREHOOK: query: EXPLAIN EXTENDED INSERT OVERWRITE TABLE outputTbl3_n1 SELECT 1, key, count(1) FROM T1_n56 GROUP BY 1, key @@ -2213,11 +2213,11 @@ POSTHOOK: Input: default@outputtbl3_n1 PREHOOK: query: CREATE TABLE outputTbl4_n1(key1 int, key2 int, key3 string, cnt int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl4_n1 +PREHOOK: Output: default@outputtbl4_n1 POSTHOOK: query: CREATE TABLE outputTbl4_n1(key1 int, key2 int, key3 string, cnt int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl4_n1 +POSTHOOK: 
Output: default@outputtbl4_n1 PREHOOK: query: EXPLAIN EXTENDED INSERT OVERWRITE TABLE outputTbl4_n1 SELECT key, 1, val, count(1) FROM T1_n56 GROUP BY key, 1, val @@ -5412,12 +5412,12 @@ PREHOOK: query: CREATE TABLE T2_n34(key STRING, val STRING) CLUSTERED BY (key, val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n34 +PREHOOK: Output: default@t2_n34 POSTHOOK: query: CREATE TABLE T2_n34(key STRING, val STRING) CLUSTERED BY (key, val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n34 +POSTHOOK: Output: default@t2_n34 PREHOOK: query: INSERT OVERWRITE TABLE T2_n34 select key, val from T1_n56 PREHOOK: type: QUERY PREHOOK: Input: default@t1_n56 @@ -6263,11 +6263,11 @@ POSTHOOK: Input: default@outputtbl4_n1 PREHOOK: query: CREATE TABLE outputTbl5_n1(key1 int, key2 int, key3 string, key4 int, cnt int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl5_n1 +PREHOOK: Output: default@outputtbl5_n1 POSTHOOK: query: CREATE TABLE outputTbl5_n1(key1 int, key2 int, key3 string, key4 int, cnt int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl5_n1 +POSTHOOK: Output: default@outputtbl5_n1 PREHOOK: query: EXPLAIN EXTENDED INSERT OVERWRITE TABLE outputTbl5_n1 SELECT key, 1, val, 2, count(1) FROM T2_n34 GROUP BY key, 1, val, 2 @@ -7640,19 +7640,19 @@ POSTHOOK: Input: default@outputtbl4_n1 PREHOOK: query: CREATE TABLE DEST1_n57(key INT, cnt INT) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DEST1_n57 +PREHOOK: Output: default@dest1_n57 POSTHOOK: query: CREATE TABLE DEST1_n57(key INT, cnt INT) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DEST1_n57 +POSTHOOK: Output: default@dest1_n57 PREHOOK: query: CREATE TABLE DEST2_n12(key INT, val STRING, cnt INT) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DEST2_n12 +PREHOOK: Output: default@dest2_n12 POSTHOOK: query: CREATE TABLE DEST2_n12(key INT, val STRING, cnt INT) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DEST2_n12 +POSTHOOK: Output: default@dest2_n12 PREHOOK: query: EXPLAIN FROM T2_n34 INSERT OVERWRITE TABLE DEST1_n57 SELECT key, count(1) GROUP BY key diff --git ql/src/test/results/clientpositive/groupby_sort_test_1.q.out ql/src/test/results/clientpositive/groupby_sort_test_1.q.out index 405374af77..5e9bbb1d91 100644 --- ql/src/test/results/clientpositive/groupby_sort_test_1.q.out +++ ql/src/test/results/clientpositive/groupby_sort_test_1.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n164(key STRING, val STRING) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n164 +PREHOOK: Output: default@t1_n164 POSTHOOK: query: CREATE TABLE T1_n164(key STRING, val STRING) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n164 +POSTHOOK: Output: default@t1_n164 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n164 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -29,11 +29,11 @@ POSTHOOK: Lineage: t1_n164.val SIMPLE 
[(t1_n164)t1_n164.FieldSchema(name:val, ty PREHOOK: query: CREATE TABLE outputTbl1_n35(key int, cnt int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n35 +PREHOOK: Output: default@outputtbl1_n35 POSTHOOK: query: CREATE TABLE outputTbl1_n35(key int, cnt int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n35 +POSTHOOK: Output: default@outputtbl1_n35 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl1_n35 SELECT key, count(1) FROM T1_n164 GROUP BY key diff --git ql/src/test/results/clientpositive/having2.q.out ql/src/test/results/clientpositive/having2.q.out index 74bb312940..f7b77e26da 100644 --- ql/src/test/results/clientpositive/having2.q.out +++ ql/src/test/results/clientpositive/having2.q.out @@ -52,7 +52,7 @@ PREHOOK: query: CREATE TABLE TestV1_Staples ( ) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TestV1_Staples +PREHOOK: Output: default@testv1_staples POSTHOOK: query: CREATE TABLE TestV1_Staples ( Item_Count INT, Ship_Priority STRING, @@ -107,7 +107,7 @@ POSTHOOK: query: CREATE TABLE TestV1_Staples ( ) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TestV1_Staples +POSTHOOK: Output: default@testv1_staples PREHOOK: query: explain SELECT customer_name, SUM(customer_balance), SUM(order_quantity) FROM default.testv1_staples s1 GROUP BY customer_name HAVING ( (COUNT(s1.discount) <= 822) AND diff --git ql/src/test/results/clientpositive/input22.q.out ql/src/test/results/clientpositive/input22.q.out index a6dbe370e6..41f43a343d 100644 --- ql/src/test/results/clientpositive/input22.q.out +++ ql/src/test/results/clientpositive/input22.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@INPUT4 +PREHOOK: Output: default@input4 POSTHOOK: query: CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@INPUT4 +POSTHOOK: Output: default@input4 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/input3_limit.q.out ql/src/test/results/clientpositive/input3_limit.q.out index a39f8ccda5..0c4eaedc9c 100644 --- ql/src/test/results/clientpositive/input3_limit.q.out +++ ql/src/test/results/clientpositive/input3_limit.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_n78(key STRING, value STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n78 +PREHOOK: Output: default@t1_n78 POSTHOOK: query: CREATE TABLE T1_n78(key STRING, value STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n78 +POSTHOOK: Output: default@t1_n78 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1_n78 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -25,11 +25,11 @@ POSTHOOK: Output: default@t1_n78 PREHOOK: query: CREATE TABLE T2_n47(key STRING, value STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n47 +PREHOOK: Output: default@t2_n47 POSTHOOK: query: CREATE TABLE T2_n47(key STRING, value STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: 
database:default -POSTHOOK: Output: default@T2_n47 +POSTHOOK: Output: default@t2_n47 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE T2_n47 SELECT * FROM (SELECT * FROM T1_n78 DISTRIBUTE BY key) T ORDER BY key, value LIMIT 20 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/input4.q.out ql/src/test/results/clientpositive/input4.q.out index 60ed774591..555c065f03 100644 --- ql/src/test/results/clientpositive/input4.q.out +++ ql/src/test/results/clientpositive/input4.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE INPUT4_n0(KEY STRING, VALUE STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@INPUT4_n0 +PREHOOK: Output: default@input4_n0 POSTHOOK: query: CREATE TABLE INPUT4_n0(KEY STRING, VALUE STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@INPUT4_n0 +POSTHOOK: Output: default@input4_n0 PREHOOK: query: EXPLAIN LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4_n0 PREHOOK: type: LOAD diff --git ql/src/test/results/clientpositive/input_lazyserde.q.out ql/src/test/results/clientpositive/input_lazyserde.q.out index 79cc9e72dc..07dcf92b4f 100644 --- ql/src/test/results/clientpositive/input_lazyserde.q.out +++ ql/src/test/results/clientpositive/input_lazyserde.q.out @@ -228,11 +228,11 @@ NULL PREHOOK: query: CREATE TABLE destBin(a UNIONTYPE<int, double, array<string>, struct<col1:int,col2:string>>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe' STORED AS SEQUENCEFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@destBin +PREHOOK: Output: default@destbin POSTHOOK: query: CREATE TABLE destBin(a UNIONTYPE<int, double, array<string>, struct<col1:int,col2:string>>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe' STORED AS SEQUENCEFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@destBin +POSTHOOK: Output: default@destbin PREHOOK: query: INSERT OVERWRITE TABLE destBin SELECT create_union( CASE WHEN key < 100 THEN 0 WHEN key < 200 THEN 1 WHEN key < 300 THEN 2 WHEN key < 400 THEN 3 ELSE 0 END, key, 2.0D, array("one","two"), struct(5,"five")) FROM srcbucket2 PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket2 diff --git ql/src/test/results/clientpositive/input_lazyserde2.q.out ql/src/test/results/clientpositive/input_lazyserde2.q.out index 5f3adcfb8a..338ce3e571 100644 --- ql/src/test/results/clientpositive/input_lazyserde2.q.out +++ ql/src/test/results/clientpositive/input_lazyserde2.q.out @@ -228,11 +228,11 @@ NULL PREHOOK: query: CREATE TABLE destBin_n0(a UNIONTYPE<int, double, array<string>, struct<col1:int,col2:string>>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe2' STORED AS SEQUENCEFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@destBin_n0 +PREHOOK: Output: default@destbin_n0 POSTHOOK: query: CREATE TABLE destBin_n0(a UNIONTYPE<int, double, array<string>, struct<col1:int,col2:string>>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe2' STORED AS SEQUENCEFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@destBin_n0 +POSTHOOK: Output: default@destbin_n0 PREHOOK: query: INSERT OVERWRITE TABLE destBin_n0 SELECT create_union( CASE WHEN key < 100 THEN 0 WHEN key < 200 THEN 1 WHEN key < 300 THEN 2 WHEN key < 400 THEN 3 ELSE 0 END, key, 2.0D, array("one","two"), struct(5,"five")) FROM srcbucket2 PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket2 diff --git ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out
ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out index 8d571b68f5..e4ed7b8e40 100644 --- ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out +++ ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out @@ -13,11 +13,11 @@ POSTHOOK: Output: database:db2 PREHOOK: query: CREATE TABLE db1.sourceTable (one string,two string) PARTITIONED BY (ds string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:db1 -PREHOOK: Output: db1@sourceTable +PREHOOK: Output: db1@sourcetable POSTHOOK: query: CREATE TABLE db1.sourceTable (one string,two string) PARTITIONED BY (ds string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:db1 -POSTHOOK: Output: db1@sourceTable +POSTHOOK: Output: db1@sourcetable PREHOOK: query: load data local inpath '../../data/files/kv1.txt' INTO TABLE db1.sourceTable partition(ds='2011-11-11') PREHOOK: type: LOAD #### A masked pattern was here #### @@ -38,11 +38,11 @@ POSTHOOK: Output: db1@sourcetable@ds=2011-11-11 PREHOOK: query: CREATE TABLE db2.destinTable (one string,two string) PARTITIONED BY (ds string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:db2 -PREHOOK: Output: db2@destinTable +PREHOOK: Output: db2@destintable POSTHOOK: query: CREATE TABLE db2.destinTable (one string,two string) PARTITIONED BY (ds string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:db2 -POSTHOOK: Output: db2@destinTable +POSTHOOK: Output: db2@destintable PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE db2.destinTable PARTITION (ds='2011-11-11') SELECT one,two FROM db1.sourceTable WHERE ds='2011-11-11' order by one desc, two desc limit 5 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/join42.q.out ql/src/test/results/clientpositive/join42.q.out index a32e0b4d98..84f8b34fa4 100644 --- ql/src/test/results/clientpositive/join42.q.out +++ ql/src/test/results/clientpositive/join42.q.out @@ -2,23 +2,23 @@ PREHOOK: query: create table L as select 4436 id PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: _dummy_database@_dummy_table PREHOOK: Output: database:default -PREHOOK: Output: default@L +PREHOOK: Output: default@l POSTHOOK: query: create table L as select 4436 id POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: _dummy_database@_dummy_table POSTHOOK: Output: database:default -POSTHOOK: Output: default@L +POSTHOOK: Output: default@l POSTHOOK: Lineage: l.id SIMPLE [] PREHOOK: query: create table LA_n11 as select 4436 loan_id, 4748 aid, 4415 pi_id PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: _dummy_database@_dummy_table PREHOOK: Output: database:default -PREHOOK: Output: default@LA_n11 +PREHOOK: Output: default@la_n11 POSTHOOK: query: create table LA_n11 as select 4436 loan_id, 4748 aid, 4415 pi_id POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: _dummy_database@_dummy_table POSTHOOK: Output: database:default -POSTHOOK: Output: default@LA_n11 +POSTHOOK: Output: default@la_n11 POSTHOOK: Lineage: la_n11.aid SIMPLE [] POSTHOOK: Lineage: la_n11.loan_id SIMPLE [] POSTHOOK: Lineage: la_n11.pi_id SIMPLE [] @@ -26,34 +26,34 @@ PREHOOK: query: create table FR as select 4436 loan_id PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: _dummy_database@_dummy_table PREHOOK: Output: database:default -PREHOOK: Output: default@FR +PREHOOK: Output: default@fr POSTHOOK: query: create table FR as select 4436 loan_id POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: _dummy_database@_dummy_table POSTHOOK: Output: database:default -POSTHOOK: Output: default@FR +POSTHOOK: Output: default@fr POSTHOOK: 
Lineage: fr.loan_id SIMPLE [] PREHOOK: query: create table A_n11 as select 4748 id PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: _dummy_database@_dummy_table PREHOOK: Output: database:default -PREHOOK: Output: default@A_n11 +PREHOOK: Output: default@a_n11 POSTHOOK: query: create table A_n11 as select 4748 id POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: _dummy_database@_dummy_table POSTHOOK: Output: database:default -POSTHOOK: Output: default@A_n11 +POSTHOOK: Output: default@a_n11 POSTHOOK: Lineage: a_n11.id SIMPLE [] PREHOOK: query: create table PI as select 4415 id PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: _dummy_database@_dummy_table PREHOOK: Output: database:default -PREHOOK: Output: default@PI +PREHOOK: Output: default@pi POSTHOOK: query: create table PI as select 4415 id POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: _dummy_database@_dummy_table POSTHOOK: Output: database:default -POSTHOOK: Output: default@PI +POSTHOOK: Output: default@pi POSTHOOK: Lineage: pi.id SIMPLE [] PREHOOK: query: create table acct as select 4748 aid, 10 acc_n, 122 brn PREHOOK: type: CREATETABLE_AS_SELECT diff --git ql/src/test/results/clientpositive/join_reorder.q.out ql/src/test/results/clientpositive/join_reorder.q.out index 14557a5b99..5ce1f96fa3 100644 --- ql/src/test/results/clientpositive/join_reorder.q.out +++ ql/src/test/results/clientpositive/join_reorder.q.out @@ -1,27 +1,27 @@ PREHOOK: query: CREATE TABLE T1_n37(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n37 +PREHOOK: Output: default@t1_n37 POSTHOOK: query: CREATE TABLE T1_n37(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n37 +POSTHOOK: Output: default@t1_n37 PREHOOK: query: CREATE TABLE T2_n24(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n24 +PREHOOK: Output: default@t2_n24 POSTHOOK: query: CREATE TABLE T2_n24(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n24 +POSTHOOK: Output: default@t2_n24 PREHOOK: query: CREATE TABLE T3_n8(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n8 +PREHOOK: Output: default@t3_n8 POSTHOOK: query: CREATE TABLE T3_n8(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n8 +POSTHOOK: Output: default@t3_n8 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n37 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/join_reorder2.q.out ql/src/test/results/clientpositive/join_reorder2.q.out index 76fdd600a1..d6ece51de3 100644 --- ql/src/test/results/clientpositive/join_reorder2.q.out +++ ql/src/test/results/clientpositive/join_reorder2.q.out @@ -1,35 +1,35 @@ PREHOOK: query: CREATE TABLE T1_n49(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n49 +PREHOOK: Output: default@t1_n49 POSTHOOK: query: CREATE TABLE T1_n49(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n49 +POSTHOOK: Output: default@t1_n49 PREHOOK: query: CREATE TABLE 
T2_n30(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n30 +PREHOOK: Output: default@t2_n30 POSTHOOK: query: CREATE TABLE T2_n30(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n30 +POSTHOOK: Output: default@t2_n30 PREHOOK: query: CREATE TABLE T3_n10(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n10 +PREHOOK: Output: default@t3_n10 POSTHOOK: query: CREATE TABLE T3_n10(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n10 +POSTHOOK: Output: default@t3_n10 PREHOOK: query: CREATE TABLE T4_n3(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T4_n3 +PREHOOK: Output: default@t4_n3 POSTHOOK: query: CREATE TABLE T4_n3(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T4_n3 +POSTHOOK: Output: default@t4_n3 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n49 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/join_reorder3.q.out ql/src/test/results/clientpositive/join_reorder3.q.out index 35ec179029..c62dc4af7d 100644 --- ql/src/test/results/clientpositive/join_reorder3.q.out +++ ql/src/test/results/clientpositive/join_reorder3.q.out @@ -1,35 +1,35 @@ PREHOOK: query: CREATE TABLE T1_n92(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n92 +PREHOOK: Output: default@t1_n92 POSTHOOK: query: CREATE TABLE T1_n92(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n92 +POSTHOOK: Output: default@t1_n92 PREHOOK: query: CREATE TABLE T2_n57(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n57 +PREHOOK: Output: default@t2_n57 POSTHOOK: query: CREATE TABLE T2_n57(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n57 +POSTHOOK: Output: default@t2_n57 PREHOOK: query: CREATE TABLE T3_n21(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n21 +PREHOOK: Output: default@t3_n21 POSTHOOK: query: CREATE TABLE T3_n21(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n21 +POSTHOOK: Output: default@t3_n21 PREHOOK: query: CREATE TABLE T4_n10(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T4_n10 +PREHOOK: Output: default@t4_n10 POSTHOOK: query: CREATE TABLE T4_n10(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T4_n10 +POSTHOOK: Output: default@t4_n10 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n92 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/join_reorder4.q.out 
ql/src/test/results/clientpositive/join_reorder4.q.out index f57e897443..2c21630ee0 100644 --- ql/src/test/results/clientpositive/join_reorder4.q.out +++ ql/src/test/results/clientpositive/join_reorder4.q.out @@ -1,27 +1,27 @@ PREHOOK: query: CREATE TABLE T1_n134(key1 STRING, val1 STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n134 +PREHOOK: Output: default@t1_n134 POSTHOOK: query: CREATE TABLE T1_n134(key1 STRING, val1 STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n134 +POSTHOOK: Output: default@t1_n134 PREHOOK: query: CREATE TABLE T2_n80(key2 STRING, val2 STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n80 +PREHOOK: Output: default@t2_n80 POSTHOOK: query: CREATE TABLE T2_n80(key2 STRING, val2 STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n80 +POSTHOOK: Output: default@t2_n80 PREHOOK: query: CREATE TABLE T3_n32(key3 STRING, val3 STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n32 +PREHOOK: Output: default@t3_n32 POSTHOOK: query: CREATE TABLE T3_n32(key3 STRING, val3 STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n32 +POSTHOOK: Output: default@t3_n32 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n134 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/acid_bloom_filter_orc_file_dump.q.out ql/src/test/results/clientpositive/llap/acid_bloom_filter_orc_file_dump.q.out index da805b0f1c..d309b1640a 100644 --- ql/src/test/results/clientpositive/llap/acid_bloom_filter_orc_file_dump.q.out +++ ql/src/test/results/clientpositive/llap/acid_bloom_filter_orc_file_dump.q.out @@ -23,7 +23,7 @@ TBLPROPERTIES ( ) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@bloomTest +PREHOOK: Output: default@bloomtest POSTHOOK: query: CREATE TABLE bloomTest( msisdn STRING, imsi VARCHAR(20), @@ -45,7 +45,7 @@ TBLPROPERTIES ( ) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@bloomTest +POSTHOOK: Output: default@bloomtest PREHOOK: query: INSERT INTO bloomTest VALUES ('12345', '12345', 12345, 12345) PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table diff --git ql/src/test/results/clientpositive/llap/acid_meta_columns_decode.q.out ql/src/test/results/clientpositive/llap/acid_meta_columns_decode.q.out index 87c03e2d75..33898186db 100644 --- ql/src/test/results/clientpositive/llap/acid_meta_columns_decode.q.out +++ ql/src/test/results/clientpositive/llap/acid_meta_columns_decode.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE transactional TABLE acidTblDefault(a int, b INT) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@acidTblDefault +PREHOOK: Output: default@acidtbldefault POSTHOOK: query: CREATE transactional TABLE acidTblDefault(a int, b INT) STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@acidTblDefault +POSTHOOK: Output: default@acidtbldefault PREHOOK: query: INSERT INTO TABLE acidTblDefault VALUES (1,2),(2,3),(3,4) PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table diff --git 
ql/src/test/results/clientpositive/llap/allow_change_col_type_par.q.out ql/src/test/results/clientpositive/llap/allow_change_col_type_par.q.out index d1905e24c7..85f30d1dfd 100644 --- ql/src/test/results/clientpositive/llap/allow_change_col_type_par.q.out +++ ql/src/test/results/clientpositive/llap/allow_change_col_type_par.q.out @@ -24,7 +24,7 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-0 Change Column - table name: default.t1_n14 + table name: hive.default.t1_n14 new column name: c1 new column type: smallint old column name: c1 diff --git ql/src/test/results/clientpositive/llap/alter_partition_change_col.q.out ql/src/test/results/clientpositive/llap/alter_partition_change_col.q.out index 9a5ac432ac..890407e2f1 100644 --- ql/src/test/results/clientpositive/llap/alter_partition_change_col.q.out +++ ql/src/test/results/clientpositive/llap/alter_partition_change_col.q.out @@ -945,7 +945,7 @@ col_1col_1col_1col_1col_1col_1158 string) `partition_col` string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@alterPartTbl +PREHOOK: Output: default@alterparttbl POSTHOOK: query: CREATE TABLE `alterPartTbl`( col_1col_1col_1col_1col_1col_11 string, col_1col_1col_1col_1col_1col_12 string, @@ -1109,7 +1109,7 @@ col_1col_1col_1col_1col_1col_1158 string) `partition_col` string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@alterPartTbl +POSTHOOK: Output: default@alterparttbl PREHOOK: query: alter table alterPartTbl add partition(partition_col='CCL') PREHOOK: type: ALTERTABLE_ADDPARTS PREHOOK: Output: default@alterparttbl diff --git ql/src/test/results/clientpositive/llap/alter_view_as_select.q.out ql/src/test/results/clientpositive/llap/alter_view_as_select.q.out index b024443542..a1eda5a36f 100644 --- ql/src/test/results/clientpositive/llap/alter_view_as_select.q.out +++ ql/src/test/results/clientpositive/llap/alter_view_as_select.q.out @@ -8,16 +8,16 @@ PREHOOK: query: CREATE VIEW tv.testView as SELECT * FROM srcpart PREHOOK: type: CREATEVIEW PREHOOK: Input: default@srcpart PREHOOK: Output: database:tv -PREHOOK: Output: tv@testView +PREHOOK: Output: tv@testview POSTHOOK: query: CREATE VIEW tv.testView as SELECT * FROM srcpart POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@srcpart POSTHOOK: Output: database:tv -POSTHOOK: Output: tv@testView -POSTHOOK: Lineage: testView.ds SIMPLE [(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ] -POSTHOOK: Lineage: testView.hr SIMPLE [(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ] -POSTHOOK: Lineage: testView.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: testView.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: tv@testview +POSTHOOK: Lineage: testview.ds SIMPLE [(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: testview.hr SIMPLE [(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ] +POSTHOOK: Lineage: testview.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: testview.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: DESCRIBE FORMATTED tv.testView PREHOOK: type: DESCTABLE PREHOOK: Input: tv@testview @@ -55,12 +55,12 @@ PREHOOK: query: ALTER VIEW tv.testView AS SELECT value FROM src WHERE key=86 PREHOOK: type: CREATEVIEW PREHOOK: Input: default@src PREHOOK: Output: 
database:tv -PREHOOK: Output: tv@testView +PREHOOK: Output: tv@testview POSTHOOK: query: ALTER VIEW tv.testView AS SELECT value FROM src WHERE key=86 POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@src POSTHOOK: Output: database:tv -POSTHOOK: Output: tv@testView +POSTHOOK: Output: tv@testview PREHOOK: query: DESCRIBE FORMATTED tv.testView PREHOOK: type: DESCTABLE PREHOOK: Input: tv@testview @@ -99,7 +99,7 @@ LIMIT 10 PREHOOK: type: CREATEVIEW PREHOOK: Input: default@src PREHOOK: Output: database:tv -PREHOOK: Output: tv@testView +PREHOOK: Output: tv@testview POSTHOOK: query: ALTER VIEW tv.testView AS SELECT * FROM src WHERE key > 80 AND key < 100 @@ -108,7 +108,7 @@ LIMIT 10 POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@src POSTHOOK: Output: database:tv -POSTHOOK: Output: tv@testView +POSTHOOK: Output: tv@testview PREHOOK: query: DESCRIBE FORMATTED tv.testView PREHOOK: type: DESCTABLE PREHOOK: Input: tv@testview diff --git ql/src/test/results/clientpositive/llap/avro_alter_table_update_columns.q.out ql/src/test/results/clientpositive/llap/avro_alter_table_update_columns.q.out index 683de0a7a2..b9b75d644a 100644 --- ql/src/test/results/clientpositive/llap/avro_alter_table_update_columns.q.out +++ ql/src/test/results/clientpositive/llap/avro_alter_table_update_columns.q.out @@ -113,7 +113,7 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-0 Update Columns - table name: default.avro_extschema_literal_n1 + table name: hive.default.avro_extschema_literal_n1 cascade: true PREHOOK: query: ALTER TABLE avro_extschema_literal_n1 UPDATE COLUMNS CASCADE @@ -329,7 +329,7 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-0 Update Columns - table name: default.avro_extschema_url_parted + table name: hive.default.avro_extschema_url_parted partition: p1 2018 diff --git ql/src/test/results/clientpositive/llap/avro_comments.q.out ql/src/test/results/clientpositive/llap/avro_comments.q.out index fbd472771e..135e66ed0e 100644 --- ql/src/test/results/clientpositive/llap/avro_comments.q.out +++ ql/src/test/results/clientpositive/llap/avro_comments.q.out @@ -38,7 +38,7 @@ TBLPROPERTIES ('avro.schema.literal'='{ }') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@testAvroComments1 +PREHOOK: Output: default@testavrocomments1 POSTHOOK: query: CREATE TABLE testAvroComments1 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' @@ -75,7 +75,7 @@ TBLPROPERTIES ('avro.schema.literal'='{ }') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@testAvroComments1 +POSTHOOK: Output: default@testavrocomments1 PREHOOK: query: DESCRIBE testAvroComments1 PREHOOK: type: DESCTABLE PREHOOK: Input: default@testavrocomments1 @@ -132,7 +132,7 @@ TBLPROPERTIES ('avro.schema.literal'='{ }') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@testAvroComments2 +PREHOOK: Output: default@testavrocomments2 POSTHOOK: query: CREATE TABLE testAvroComments2 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' @@ -167,7 +167,7 @@ TBLPROPERTIES ('avro.schema.literal'='{ }') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@testAvroComments2 +POSTHOOK: Output: default@testavrocomments2 PREHOOK: query: DESCRIBE testAvroComments2 PREHOOK: type: DESCTABLE PREHOOK: Input: default@testavrocomments2 @@ -222,7 +222,7 @@ TBLPROPERTIES ('avro.schema.literal'='{ }') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@testAvroComments3 +PREHOOK: 
Output: default@testavrocomments3 POSTHOOK: query: CREATE TABLE testAvroComments3 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' @@ -255,7 +255,7 @@ TBLPROPERTIES ('avro.schema.literal'='{ }') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@testAvroComments3 +POSTHOOK: Output: default@testavrocomments3 PREHOOK: query: DESCRIBE testAvroComments3 PREHOOK: type: DESCTABLE PREHOOK: Input: default@testavrocomments3 @@ -286,7 +286,7 @@ PREHOOK: query: CREATE TABLE testAvroComments4 ( STORED AS AVRO PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@testAvroComments4 +PREHOOK: Output: default@testavrocomments4 POSTHOOK: query: CREATE TABLE testAvroComments4 ( number int COMMENT "Order of playing the role", first_name string COMMENT "first name of actor playing role", @@ -295,7 +295,7 @@ POSTHOOK: query: CREATE TABLE testAvroComments4 ( STORED AS AVRO POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@testAvroComments4 +POSTHOOK: Output: default@testavrocomments4 PREHOOK: query: DESCRIBE testAvroComments4 PREHOOK: type: DESCTABLE PREHOOK: Input: default@testavrocomments4 @@ -326,7 +326,7 @@ PREHOOK: query: CREATE TABLE testAvroComments5 ( STORED AS AVRO PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@testAvroComments5 +PREHOOK: Output: default@testavrocomments5 POSTHOOK: query: CREATE TABLE testAvroComments5 ( number int COMMENT "Order of playing the role", first_name string, @@ -335,7 +335,7 @@ POSTHOOK: query: CREATE TABLE testAvroComments5 ( STORED AS AVRO POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@testAvroComments5 +POSTHOOK: Output: default@testavrocomments5 PREHOOK: query: DESCRIBE testAvroComments5 PREHOOK: type: DESCTABLE PREHOOK: Input: default@testavrocomments5 @@ -366,7 +366,7 @@ PREHOOK: query: CREATE TABLE testAvroComments6 ( STORED AS AVRO PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@testAvroComments6 +PREHOOK: Output: default@testavrocomments6 POSTHOOK: query: CREATE TABLE testAvroComments6 ( number int, first_name string, @@ -375,7 +375,7 @@ POSTHOOK: query: CREATE TABLE testAvroComments6 ( STORED AS AVRO POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@testAvroComments6 +POSTHOOK: Output: default@testavrocomments6 PREHOOK: query: DESCRIBE testAvroComments6 PREHOOK: type: DESCTABLE PREHOOK: Input: default@testavrocomments6 diff --git ql/src/test/results/clientpositive/llap/bucketizedhiveinputformat.q.out ql/src/test/results/clientpositive/llap/bucketizedhiveinputformat.q.out index 25b873eab0..36f4493336 100644 --- ql/src/test/results/clientpositive/llap/bucketizedhiveinputformat.q.out +++ ql/src/test/results/clientpositive/llap/bucketizedhiveinputformat.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_n125(name STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n125 +PREHOOK: Output: default@t1_n125 POSTHOOK: query: CREATE TABLE T1_n125(name STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n125 +POSTHOOK: Output: default@t1_n125 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1_n125 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -17,11 +17,11 @@ POSTHOOK: Output: default@t1_n125 PREHOOK: query: CREATE 
TABLE T2_n74(name STRING) STORED AS SEQUENCEFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n74 +PREHOOK: Output: default@t2_n74 POSTHOOK: query: CREATE TABLE T2_n74(name STRING) STORED AS SEQUENCEFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n74 +POSTHOOK: Output: default@t2_n74 Warning: Shuffle Join MERGEJOIN[25][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product Warning: Shuffle Join MERGEJOIN[26][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Reducer 3' is a cross product PREHOOK: query: INSERT OVERWRITE TABLE T2_n74 SELECT * FROM ( @@ -46,11 +46,11 @@ POSTHOOK: Lineage: t2_n74.name SIMPLE [(t1_n125)t1_n125.FieldSchema(name:name, t PREHOOK: query: CREATE TABLE T3_n28(name STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n28 +PREHOOK: Output: default@t3_n28 POSTHOOK: query: CREATE TABLE T3_n28(name STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n28 +POSTHOOK: Output: default@t3_n28 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T3_n28 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/check_constraint.q.out ql/src/test/results/clientpositive/llap/check_constraint.q.out index bc5d361859..8ab14ba2b8 100644 --- ql/src/test/results/clientpositive/llap/check_constraint.q.out +++ ql/src/test/results/clientpositive/llap/check_constraint.q.out @@ -430,7 +430,7 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-0 Add Constraint - table name: default.tmulti + table name: hive.default.tmulti PREHOOK: query: alter table tmulti add constraint un1 UNIQUE (userName, numClicks) DISABLE PREHOOK: type: ALTERTABLE_ADDCONSTRAINT @@ -2149,7 +2149,7 @@ STAGE PLANS: Stage: Stage-0 Drop Constraint constraint name: ch2 - table name: default.acid_uami_n0 + table name: hive.default.acid_uami_n0 PREHOOK: query: ALTER table acid_uami_n0 drop constraint ch2 PREHOOK: type: ALTERTABLE_DROPCONSTRAINT @@ -3091,13 +3091,13 @@ PREHOOK: query: CREATE TABLE numericDataType(a TINYINT CONSTRAINT tinyint_constr clustered by (b) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@numericDataType +PREHOOK: Output: default@numericdatatype POSTHOOK: query: CREATE TABLE numericDataType(a TINYINT CONSTRAINT tinyint_constraint DEFAULT 127Y ENABLE, b bigint CONSTRAINT check1 CHECK (b in(4,5)) ENABLE) clustered by (b) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@numericDataType +POSTHOOK: Output: default@numericdatatype PREHOOK: query: DESC FORMATTED numericDataType PREHOOK: type: DESCTABLE PREHOOK: Input: default@numericdatatype diff --git ql/src/test/results/clientpositive/llap/column_access_stats.q.out ql/src/test/results/clientpositive/llap/column_access_stats.q.out index ba4aa68d50..b7dd6ea574 100644 --- ql/src/test/results/clientpositive/llap/column_access_stats.q.out +++ ql/src/test/results/clientpositive/llap/column_access_stats.q.out @@ -1,7 +1,7 @@ PREHOOK: query: CREATE TABLE T1_n127(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n127 +PREHOOK: Output: default@t1_n127 PREHOOK: query: LOAD 
DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n127 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -9,15 +9,15 @@ PREHOOK: Output: default@t1_n127 PREHOOK: query: CREATE TABLE T2_n75(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n75 +PREHOOK: Output: default@t2_n75 PREHOOK: query: CREATE TABLE T3_n29(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n29 +PREHOOK: Output: default@t3_n29 PREHOOK: query: CREATE TABLE T4_n16(key STRING, val STRING) PARTITIONED BY (p STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T4_n16 +PREHOOK: Output: default@t4_n16 PREHOOK: query: SELECT key FROM T1_n127 PREHOOK: type: QUERY PREHOOK: Input: default@t1_n127 diff --git ql/src/test/results/clientpositive/llap/column_name_is_table_alias.q.out ql/src/test/results/clientpositive/llap/column_name_is_table_alias.q.out index 3b79e330cc..ccf9083eac 100644 --- ql/src/test/results/clientpositive/llap/column_name_is_table_alias.q.out +++ ql/src/test/results/clientpositive/llap/column_name_is_table_alias.q.out @@ -9,19 +9,19 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: CREATE TABLE tableA (a INTEGER,z INTEGER) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@tableA +PREHOOK: Output: default@tablea POSTHOOK: query: CREATE TABLE tableA (a INTEGER,z INTEGER) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@tableA +POSTHOOK: Output: default@tablea PREHOOK: query: CREATE TABLE tableB (a INTEGER,b INTEGER,z INTEGER) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@tableB +PREHOOK: Output: default@tableb POSTHOOK: query: CREATE TABLE tableB (a INTEGER,b INTEGER,z INTEGER) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@tableB +POSTHOOK: Output: default@tableb PREHOOK: query: SELECT a.z, b.b FROM tableB AS b JOIN tableA AS a ON a.a=b.b PREHOOK: type: QUERY PREHOOK: Input: default@tablea diff --git ql/src/test/results/clientpositive/llap/compustat_avro.q.out ql/src/test/results/clientpositive/llap/compustat_avro.q.out index 3655dfab0e..d0fe152a47 100644 --- ql/src/test/results/clientpositive/llap/compustat_avro.q.out +++ ql/src/test/results/clientpositive/llap/compustat_avro.q.out @@ -12,7 +12,7 @@ PREHOOK: query: create table testAvro #### A masked pattern was here #### PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@testAvro +PREHOOK: Output: default@testavro POSTHOOK: query: create table testAvro ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' @@ -23,7 +23,7 @@ POSTHOOK: query: create table testAvro #### A masked pattern was here #### POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@testAvro +POSTHOOK: Output: default@testavro PREHOOK: query: describe formatted testAvro col1 PREHOOK: type: DESCTABLE PREHOOK: Input: default@testavro diff --git ql/src/test/results/clientpositive/llap/constraints_alter.q.out ql/src/test/results/clientpositive/llap/constraints_alter.q.out index a66d148a14..bced4045c8 100644 --- ql/src/test/results/clientpositive/llap/constraints_alter.q.out +++ ql/src/test/results/clientpositive/llap/constraints_alter.q.out @@ -21,7 +21,7 @@ FROM ) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: 
default@COLUMNS_V2 +PREHOOK: Output: default@columns_v2 POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `COLUMNS_V2` ( `CD_ID` bigint, `COMMENT` string, @@ -45,7 +45,7 @@ FROM ) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@COLUMNS_V2 +POSTHOOK: Output: default@columns_v2 PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `KEY_CONSTRAINTS` ( `CHILD_CD_ID` bigint, @@ -86,7 +86,7 @@ FROM ) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@KEY_CONSTRAINTS +PREHOOK: Output: default@key_constraints POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `KEY_CONSTRAINTS` ( `CHILD_CD_ID` bigint, @@ -127,7 +127,7 @@ FROM ) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@KEY_CONSTRAINTS +POSTHOOK: Output: default@key_constraints PREHOOK: query: create table t1_n2939 (a_n2939 integer, b_n2939 string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default diff --git ql/src/test/results/clientpositive/llap/correlationoptimizer4.q.out ql/src/test/results/clientpositive/llap/correlationoptimizer4.q.out index 527416fe03..49309a68c9 100644 --- ql/src/test/results/clientpositive/llap/correlationoptimizer4.q.out +++ ql/src/test/results/clientpositive/llap/correlationoptimizer4.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_n146(key INT, val STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n146 +PREHOOK: Output: default@t1_n146 POSTHOOK: query: CREATE TABLE T1_n146(key INT, val STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n146 +POSTHOOK: Output: default@t1_n146 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n146 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -17,11 +17,11 @@ POSTHOOK: Output: default@t1_n146 PREHOOK: query: CREATE TABLE T2_n86(key INT, val STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n86 +PREHOOK: Output: default@t2_n86 POSTHOOK: query: CREATE TABLE T2_n86(key INT, val STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n86 +POSTHOOK: Output: default@t2_n86 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n86 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -33,11 +33,11 @@ POSTHOOK: Output: default@t2_n86 PREHOOK: query: CREATE TABLE T3_n34(key INT, val STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n34 +PREHOOK: Output: default@t3_n34 POSTHOOK: query: CREATE TABLE T3_n34(key INT, val STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n34 +POSTHOOK: Output: default@t3_n34 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T3_n34 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/create_like.q.out ql/src/test/results/clientpositive/llap/create_like.q.out index 8867dc19d5..2fb9877695 100644 --- ql/src/test/results/clientpositive/llap/create_like.q.out +++ ql/src/test/results/clientpositive/llap/create_like.q.out @@ -465,19 +465,19 @@ Storage Desc Params: PREHOOK: query: CREATE TABLE PropertiedParquetTable(a INT, b STRING) STORED AS PARQUET TBLPROPERTIES("parquet.compression"="LZO") PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@PropertiedParquetTable 
+PREHOOK: Output: default@propertiedparquettable POSTHOOK: query: CREATE TABLE PropertiedParquetTable(a INT, b STRING) STORED AS PARQUET TBLPROPERTIES("parquet.compression"="LZO") POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@PropertiedParquetTable +POSTHOOK: Output: default@propertiedparquettable PREHOOK: query: CREATE TABLE LikePropertiedParquetTable LIKE PropertiedParquetTable PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@LikePropertiedParquetTable +PREHOOK: Output: default@likepropertiedparquettable POSTHOOK: query: CREATE TABLE LikePropertiedParquetTable LIKE PropertiedParquetTable POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@LikePropertiedParquetTable +POSTHOOK: Output: default@likepropertiedparquettable PREHOOK: query: DESCRIBE FORMATTED LikePropertiedParquetTable PREHOOK: type: DESCTABLE PREHOOK: Input: default@likepropertiedparquettable diff --git ql/src/test/results/clientpositive/llap/create_union_table.q.out ql/src/test/results/clientpositive/llap/create_union_table.q.out index 5cd79f347b..024800353b 100644 --- ql/src/test/results/clientpositive/llap/create_union_table.q.out +++ ql/src/test/results/clientpositive/llap/create_union_table.q.out @@ -15,10 +15,10 @@ STAGE PLANS: Stage: Stage-0 Create Table columns: mydata uniontype<int,double,array<string>,struct<a:int,b:string>>, strct struct<a:int,b:string,c:int> - name: default.abc input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.abc PREHOOK: query: create table abc(mydata uniontype<int,double,array<string>,struct<a:int,b:string>>, strct struct<a:int,b:string,c:int>) diff --git ql/src/test/results/clientpositive/llap/create_view_translate.q.out ql/src/test/results/clientpositive/llap/create_view_translate.q.out index b5d464e716..40a450489e 100644 --- ql/src/test/results/clientpositive/llap/create_view_translate.q.out +++ ql/src/test/results/clientpositive/llap/create_view_translate.q.out @@ -142,7 +142,7 @@ STAGE PLANS: Create View columns: id int, _c1 string expanded text: SELECT `items`.`id`, `items`.`info`['price'] FROM `default`.`items` - name: default.priceview + name: hive.default.priceview original text: SELECT items.id, items.info['price'] FROM items PREHOOK: query: CREATE VIEW priceview AS SELECT items.id, items.info['price'] FROM items diff --git ql/src/test/results/clientpositive/llap/create_with_constraints.q.out ql/src/test/results/clientpositive/llap/create_with_constraints.q.out index ba317581d5..3b9a2279db 100644 --- ql/src/test/results/clientpositive/llap/create_with_constraints.q.out +++ ql/src/test/results/clientpositive/llap/create_with_constraints.q.out @@ -2293,12 +2293,12 @@ POSTHOOK: type: SWITCHDATABASE POSTHOOK: Input: database:dbconstraint PREHOOK: query: CREATE TABLE Table2 (a STRING, b STRING NOT NULL DISABLE, CONSTRAINT Pk1 PRIMARY KEY (a) DISABLE) PREHOOK: type: CREATETABLE -PREHOOK: Output: DbConstraint@Table2 PREHOOK: Output: database:dbconstraint +PREHOOK: Output: dbconstraint@table2 POSTHOOK: query: CREATE TABLE Table2 (a STRING, b STRING NOT NULL DISABLE, CONSTRAINT Pk1 PRIMARY KEY (a) DISABLE) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: DbConstraint@Table2 POSTHOOK: Output: database:dbconstraint +POSTHOOK: Output: dbconstraint@table2 PREHOOK: query: USE default PREHOOK: type: SWITCHDATABASE PREHOOK: Input: database:default @@ -2599,12 +2599,12 @@ PREHOOK: query: CREATE TABLE numericDataType_n0(a TINYINT, b SMALLINT NOT NULL E
BIGINT , e DOUBLE , f DECIMAL(9,2)) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@numericDataType_n0 +PREHOOK: Output: default@numericdatatype_n0 POSTHOOK: query: CREATE TABLE numericDataType_n0(a TINYINT, b SMALLINT NOT NULL ENABLE, c INT, d BIGINT , e DOUBLE , f DECIMAL(9,2)) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@numericDataType_n0 +POSTHOOK: Output: default@numericdatatype_n0 PREHOOK: query: INSERT INTO numericDataType_n0 values(2,45,5667,67890,5.6,678.5) PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table diff --git ql/src/test/results/clientpositive/llap/cross_prod_1.q.out ql/src/test/results/clientpositive/llap/cross_prod_1.q.out index b3ecee0f03..eb2b721111 100644 --- ql/src/test/results/clientpositive/llap/cross_prod_1.q.out +++ ql/src/test/results/clientpositive/llap/cross_prod_1.q.out @@ -3,13 +3,13 @@ select distinct * from src order by key limit 10 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@X_n0 +PREHOOK: Output: default@x_n0 POSTHOOK: query: create table X_n0 as select distinct * from src order by key limit 10 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@X_n0 +POSTHOOK: Output: default@x_n0 POSTHOOK: Lineage: x_n0.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: x_n0.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Warning: Shuffle Join MERGEJOIN[11][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product diff --git ql/src/test/results/clientpositive/llap/cross_prod_3.q.out ql/src/test/results/clientpositive/llap/cross_prod_3.q.out index b36ee65884..c0333be76e 100644 --- ql/src/test/results/clientpositive/llap/cross_prod_3.q.out +++ ql/src/test/results/clientpositive/llap/cross_prod_3.q.out @@ -1,11 +1,11 @@ PREHOOK: query: create table X_n2 (key string, value string) clustered by (key) into 2 buckets PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@X_n2 +PREHOOK: Output: default@x_n2 POSTHOOK: query: create table X_n2 (key string, value string) clustered by (key) into 2 buckets POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@X_n2 +POSTHOOK: Output: default@x_n2 PREHOOK: query: insert overwrite table X_n2 select distinct * from src order by key limit 10 PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -21,13 +21,13 @@ select * from src order by key limit 1 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@Y_n0 +PREHOOK: Output: default@y_n0 POSTHOOK: query: create table Y_n0 as select * from src order by key limit 1 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@Y_n0 +POSTHOOK: Output: default@y_n0 POSTHOOK: Lineage: y_n0.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: y_n0.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: explain select * from Y_n0, (select * from X_n2 as A join X_n2 as B on A.key=B.key) as C where Y_n0.key=C.key diff --git ql/src/test/results/clientpositive/llap/cross_prod_4.q.out ql/src/test/results/clientpositive/llap/cross_prod_4.q.out index cb0cf816c6..ab7bba0a8f 100644 
--- ql/src/test/results/clientpositive/llap/cross_prod_4.q.out
+++ ql/src/test/results/clientpositive/llap/cross_prod_4.q.out
@@ -3,13 +3,13 @@ select distinct * from src order by key limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@X_n1
+PREHOOK: Output: default@x_n1
 POSTHOOK: query: create table X_n1 as
 select distinct * from src order by key limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@X_n1
+POSTHOOK: Output: default@x_n1
 POSTHOOK: Lineage: x_n1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: x_n1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
diff --git ql/src/test/results/clientpositive/llap/cross_product_check_1.q.out ql/src/test/results/clientpositive/llap/cross_product_check_1.q.out
index 5b3bf1c902..8e2e831605 100644
--- ql/src/test/results/clientpositive/llap/cross_product_check_1.q.out
+++ ql/src/test/results/clientpositive/llap/cross_product_check_1.q.out
@@ -3,13 +3,13 @@ select * from src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@A_n8
+PREHOOK: Output: default@a_n8
 POSTHOOK: query: create table A_n8 as
 select * from src
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@A_n8
+POSTHOOK: Output: default@a_n8
 POSTHOOK: Lineage: a_n8.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: a_n8.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: create table B_n6 as
@@ -18,14 +18,14 @@ limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@B_n6
+PREHOOK: Output: default@b_n6
 POSTHOOK: query: create table B_n6 as
 select * from src
 limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@B_n6
+POSTHOOK: Output: default@b_n6
 POSTHOOK: Lineage: b_n6.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: b_n6.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
diff --git ql/src/test/results/clientpositive/llap/cross_product_check_2.q.out ql/src/test/results/clientpositive/llap/cross_product_check_2.q.out
index f4bf4fbaa7..604fb87d53 100644
--- ql/src/test/results/clientpositive/llap/cross_product_check_2.q.out
+++ ql/src/test/results/clientpositive/llap/cross_product_check_2.q.out
@@ -3,13 +3,13 @@ select * from src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@A_n2
+PREHOOK: Output: default@a_n2
 POSTHOOK: query: create table A_n2 as
 select * from src
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@A_n2
+POSTHOOK: Output: default@a_n2
 POSTHOOK: Lineage: a_n2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: a_n2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: create table B_n2 as
@@ -18,14 +18,14 @@ limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@B_n2
+PREHOOK: Output: default@b_n2
 POSTHOOK: query: create table B_n2 as
 select * from src order by key
 limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@B_n2
+POSTHOOK: Output: default@b_n2
 POSTHOOK: Lineage: b_n2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: b_n2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
diff --git ql/src/test/results/clientpositive/llap/ctas.q.out ql/src/test/results/clientpositive/llap/ctas.q.out
index 275bebd3b9..ab33e1c83a 100644
--- ql/src/test/results/clientpositive/llap/ctas.q.out
+++ ql/src/test/results/clientpositive/llap/ctas.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: create table nzhang_Tmp(a int, b string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@nzhang_Tmp
+PREHOOK: Output: default@nzhang_tmp
 POSTHOOK: query: create table nzhang_Tmp(a int, b string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@nzhang_Tmp
+POSTHOOK: Output: default@nzhang_tmp
 PREHOOK: query: select * from nzhang_Tmp
 PREHOOK: type: QUERY
 PREHOOK: Input: default@nzhang_tmp
@@ -18,12 +18,12 @@ PREHOOK: query: explain create table nzhang_CTAS1 as select key k, value from sr
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@nzhang_CTAS1
+PREHOOK: Output: default@nzhang_ctas1
 POSTHOOK: query: explain create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@nzhang_CTAS1
+POSTHOOK: Output: default@nzhang_ctas1
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-2 depends on stages: Stage-1
@@ -103,7 +103,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.nzhang_CTAS1
+                  name: default.nzhang_ctas1
             Select Operator
               expressions: _col0 (type: string), _col1 (type: string)
               outputColumnNames: col1, col2
@@ -141,10 +141,10 @@ STAGE PLANS:
   Stage: Stage-4
       Create Table
         columns: k string, value string
-        name: default.nzhang_CTAS1
         input format: org.apache.hadoop.mapred.TextInputFormat
         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
         serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        name: hive.default.nzhang_ctas1

   Stage: Stage-3
     Stats Work
@@ -152,7 +152,7 @@ STAGE PLANS:
       Column Stats Desc:
           Columns: k, value
           Column Types: string, string
-          Table: default.nzhang_CTAS1
+          Table: default.nzhang_ctas1

   Stage: Stage-0
     Move Operator
@@ -164,12 +164,12 @@ PREHOOK: query: create table nzhang_CTAS1 as select key k, value from src sort b
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@nzhang_CTAS1
+PREHOOK: Output: default@nzhang_ctas1
 POSTHOOK: query: create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@nzhang_CTAS1
+POSTHOOK: Output: default@nzhang_ctas1
 POSTHOOK: Lineage: nzhang_ctas1.k SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: nzhang_ctas1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: select * from nzhang_CTAS1
@@ -352,10 +352,10 @@ STAGE PLANS:
   Stage: Stage-4
       Create Table
         columns: key string, value string
-        name: default.nzhang_ctas2
         input format: org.apache.hadoop.mapred.TextInputFormat
         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
         serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        name: hive.default.nzhang_ctas2

   Stage: Stage-3
     Stats Work
@@ -563,10 +563,10 @@ STAGE PLANS:
   Stage: Stage-4
       Create Table
         columns: half_key double, conb string
-        name: default.nzhang_ctas3
         input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
         output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
         serde name: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+        name: hive.default.nzhang_ctas3

   Stage: Stage-3
     Stats Work
@@ -838,11 +838,11 @@ STAGE PLANS:
   Stage: Stage-4
       Create Table
         columns: key string, value string
-        name: default.nzhang_ctas4
         field delimiter: ,
         input format: org.apache.hadoop.mapred.TextInputFormat
         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
         serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        name: hive.default.nzhang_ctas4

   Stage: Stage-3
     Stats Work
@@ -1051,13 +1051,13 @@ STAGE PLANS:
   Stage: Stage-4
       Create Table
         columns: key string, value string
-        name: default.nzhang_ctas5
         field delimiter: ,
         input format: org.apache.hadoop.mapred.TextInputFormat
         line delimiter: 

         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
         serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        name: hive.default.nzhang_ctas5

   Stage: Stage-3
     Stats Work
diff --git ql/src/test/results/clientpositive/llap/dbtxnmgr_compact1.q.out ql/src/test/results/clientpositive/llap/dbtxnmgr_compact1.q.out
index cebcb15981..572107c84a 100644
--- ql/src/test/results/clientpositive/llap/dbtxnmgr_compact1.q.out
+++ ql/src/test/results/clientpositive/llap/dbtxnmgr_compact1.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: create table T1_n153(key string, val string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n153
+PREHOOK: Output: default@t1_n153
 POSTHOOK: query: create table T1_n153(key string, val string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n153
+POSTHOOK: Output: default@t1_n153
 PREHOOK: query: alter table T1_n153 compact 'major'
 PREHOOK: type: ALTERTABLE_COMPACT
 POSTHOOK: query: alter table T1_n153 compact 'major'
diff --git ql/src/test/results/clientpositive/llap/dbtxnmgr_compact2.q.out ql/src/test/results/clientpositive/llap/dbtxnmgr_compact2.q.out
index 9d744e2c61..3be5d2f281 100644
--- ql/src/test/results/clientpositive/llap/dbtxnmgr_compact2.q.out
+++ ql/src/test/results/clientpositive/llap/dbtxnmgr_compact2.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: create table T1_n105(key string, val string) partitioned by (ds string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n105
+PREHOOK: Output: default@t1_n105
 POSTHOOK: query: create table T1_n105(key string, val string) partitioned by (ds string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n105
+POSTHOOK: Output: default@t1_n105
 PREHOOK: query: alter table T1_n105 add partition (ds = 'today')
 PREHOOK: type: ALTERTABLE_ADDPARTS
 PREHOOK: Output: default@t1_n105
@@ -37,7 +37,7 @@ STAGE PLANS:
       compaction type: minor
       partition spec:
        ds yesterday
-      table name: default.T1_n105
+      table name: default.t1_n105

 PREHOOK: query: alter table T1_n105 partition (ds = 'yesterday') compact 'minor'
 PREHOOK: type: ALTERTABLE_COMPACT
diff --git ql/src/test/results/clientpositive/llap/dbtxnmgr_compact3.q.out ql/src/test/results/clientpositive/llap/dbtxnmgr_compact3.q.out
index 707548562f..74d56b230c 100644
--- ql/src/test/results/clientpositive/llap/dbtxnmgr_compact3.q.out
+++ ql/src/test/results/clientpositive/llap/dbtxnmgr_compact3.q.out
@@ -12,11 +12,11 @@ POSTHOOK: type: SWITCHDATABASE
 POSTHOOK: Input: database:d1
 PREHOOK: query: create table T1_n71(key string, val string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
-PREHOOK: Output: D1@T1_n71
+PREHOOK: Output: d1@t1_n71
 PREHOOK: Output: database:d1
 POSTHOOK: query: create table T1_n71(key string, val string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: D1@T1_n71
+POSTHOOK: Output: d1@t1_n71
 POSTHOOK: Output: database:d1
 PREHOOK: query: alter table T1_n71 compact 'major'
 PREHOOK: type: ALTERTABLE_COMPACT
diff --git ql/src/test/results/clientpositive/llap/dbtxnmgr_ddl1.q.out ql/src/test/results/clientpositive/llap/dbtxnmgr_ddl1.q.out
index b312cb869b..1dda1b088a 100644
--- ql/src/test/results/clientpositive/llap/dbtxnmgr_ddl1.q.out
+++ ql/src/test/results/clientpositive/llap/dbtxnmgr_ddl1.q.out
@@ -21,19 +21,19 @@ POSTHOOK: Output: database:d1
 PREHOOK: query: create table T1_n50(key string, val string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n50
+PREHOOK: Output: default@t1_n50
 POSTHOOK: query: create table T1_n50(key string, val string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n50
+POSTHOOK: Output: default@t1_n50
 PREHOOK: query: create table T2_n31 like T1_n50
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n31
+PREHOOK: Output: default@t2_n31
 POSTHOOK: query: create table T2_n31 like T1_n50
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n31
+POSTHOOK: Output: default@t2_n31
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n50
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -60,12 +60,12 @@ PREHOOK: query: create table T3_n11 as select * from T1_n50
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@t1_n50
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n11
+PREHOOK: Output: default@t3_n11
 POSTHOOK: query: create table T3_n11 as select * from T1_n50
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@t1_n50
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n11
+POSTHOOK: Output: default@t3_n11
 POSTHOOK: Lineage: t3_n11.key SIMPLE [(t1_n50)t1_n50.FieldSchema(name:key, type:string, comment:null), ]
 POSTHOOK: Lineage: t3_n11.val SIMPLE [(t1_n50)t1_n50.FieldSchema(name:val, type:string, comment:null), ]
 PREHOOK: query: create table T4_n4 (key char(10), val decimal(5,2), b int)
@@ -74,14 +74,14 @@ PREHOOK: query: create table T4_n4 (key char(10), val decimal(5,2), b int)
 stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T4_n4
+PREHOOK: Output: default@t4_n4
 POSTHOOK: query: create table T4_n4 (key char(10), val decimal(5,2), b int)
 partitioned by (ds string)
 clustered by (b) into 10 buckets
 stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T4_n4
+POSTHOOK: Output: default@t4_n4
 PREHOOK: query: alter table T3_n11 rename to newT3_n11
 PREHOOK: type: ALTERTABLE_RENAME
 PREHOOK: Input: default@t3_n11
@@ -89,7 +89,7 @@ PREHOOK: Output: default@t3_n11
 POSTHOOK: query: alter table T3_n11 rename to newT3_n11
 POSTHOOK: type: ALTERTABLE_RENAME
 POSTHOOK: Input: default@t3_n11
-POSTHOOK: Output: default@newT3_n11
+POSTHOOK: Output: default@newt3_n11
 POSTHOOK: Output: default@t3_n11
 PREHOOK: query: alter table T2_n31 set tblproperties ('test'='thisisatest')
 PREHOOK: type: ALTERTABLE_PROPERTIES
@@ -158,11 +158,11 @@ POSTHOOK: Output: default@t4_n4@ds=tomorrow
 PREHOOK: query: create table T5_n1 (a string, b int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T5_n1
+PREHOOK: Output: default@t5_n1
 POSTHOOK: query: create table T5_n1 (a string, b int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T5_n1
+POSTHOOK: Output: default@t5_n1
 PREHOOK: query: alter table T5_n1 set fileformat orc
 PREHOOK: type: ALTERTABLE_FILEFORMAT
 PREHOOK: Input: default@t5_n1
@@ -174,11 +174,11 @@ POSTHOOK: Output: default@t5_n1
 PREHOOK: query: create table T7_n2 (a string, b int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T7_n2
+PREHOOK: Output: default@t7_n2
 POSTHOOK: query: create table T7_n2 (a string, b int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T7_n2
+POSTHOOK: Output: default@t7_n2
 #### A masked pattern was here ####
 PREHOOK: type: ALTERTABLE_LOCATION
 PREHOOK: Input: default@t7_n2
@@ -218,13 +218,13 @@ PREHOOK: query: create view V1_n5 as select key from T1_n50
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@t1_n50
 PREHOOK: Output: database:default
-PREHOOK: Output: default@V1_n5
+PREHOOK: Output: default@v1_n5
 POSTHOOK: query: create view V1_n5 as select key from T1_n50
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@t1_n50
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V1_n5
-POSTHOOK: Lineage: V1_n5.key SIMPLE [(t1_n50)t1_n50.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Output: default@v1_n5
+POSTHOOK: Lineage: v1_n5.key SIMPLE [(t1_n50)t1_n50.FieldSchema(name:key, type:string, comment:null), ]
 PREHOOK: query: alter view V1_n5 set tblproperties ('test'='thisisatest')
 PREHOOK: type: ALTERVIEW_PROPERTIES
 PREHOOK: Input: default@v1_n5
diff --git ql/src/test/results/clientpositive/llap/dbtxnmgr_query1.q.out ql/src/test/results/clientpositive/llap/dbtxnmgr_query1.q.out
index 603c0f4126..a2632cb1ba 100644
--- ql/src/test/results/clientpositive/llap/dbtxnmgr_query1.q.out
+++ ql/src/test/results/clientpositive/llap/dbtxnmgr_query1.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: create table T1_n20(key string, val string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n20
+PREHOOK: Output: default@t1_n20
 POSTHOOK: query: create table T1_n20(key string, val string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n20
+POSTHOOK: Output: default@t1_n20
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n20
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -31,11 +31,11 @@ POSTHOOK: Input: default@t1_n20
 PREHOOK: query: create table T2_n12(key string, val string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n12
+PREHOOK: Output: default@t2_n12
 POSTHOOK: query: create table T2_n12(key string, val string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n12
+POSTHOOK: Output: default@t2_n12
 PREHOOK: query: insert into table T2_n12 select * from T1_n20
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1_n20
diff --git ql/src/test/results/clientpositive/llap/dbtxnmgr_query2.q.out ql/src/test/results/clientpositive/llap/dbtxnmgr_query2.q.out
index 43096a7ba9..f5f54cbc07 100644
--- ql/src/test/results/clientpositive/llap/dbtxnmgr_query2.q.out
+++ ql/src/test/results/clientpositive/llap/dbtxnmgr_query2.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: create table T1_n74(key string, val string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n74
+PREHOOK: Output: default@t1_n74
 POSTHOOK: query: create table T1_n74(key string, val string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n74
+POSTHOOK: Output: default@t1_n74
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n74
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -31,11 +31,11 @@ POSTHOOK: Input: default@t1_n74
 PREHOOK: query: create table T2_n45(key string, val string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n45
+PREHOOK: Output: default@t2_n45
 POSTHOOK: query: create table T2_n45(key string, val string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n45
+POSTHOOK: Output: default@t2_n45
 PREHOOK: query: insert overwrite table T2_n45 select * from T1_n74
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1_n74
diff --git ql/src/test/results/clientpositive/llap/dbtxnmgr_query3.q.out ql/src/test/results/clientpositive/llap/dbtxnmgr_query3.q.out
index 5af2f1c0d6..8bb1eabb77 100644
--- ql/src/test/results/clientpositive/llap/dbtxnmgr_query3.q.out
+++ ql/src/test/results/clientpositive/llap/dbtxnmgr_query3.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: create table T1_n111(key string, val string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n111
+PREHOOK: Output: default@t1_n111
 POSTHOOK: query: create table T1_n111(key string, val string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n111
+POSTHOOK: Output: default@t1_n111
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n111
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -31,11 +31,11 @@ POSTHOOK: Input: default@t1_n111
 PREHOOK: query: create table T2_n67(key string, val string) partitioned by (pval string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n67
+PREHOOK: Output: default@t2_n67
 POSTHOOK: query: create table T2_n67(key string, val string) partitioned by (pval string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n67
+POSTHOOK: Output: default@t2_n67
 PREHOOK: query: insert into table T2_n67 partition (pval = '1') select * from T1_n111
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1_n111
diff --git ql/src/test/results/clientpositive/llap/dbtxnmgr_query4.q.out ql/src/test/results/clientpositive/llap/dbtxnmgr_query4.q.out
index dd7b6294c0..3ba2bc65cb 100644
--- ql/src/test/results/clientpositive/llap/dbtxnmgr_query4.q.out
+++ ql/src/test/results/clientpositive/llap/dbtxnmgr_query4.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: create table T1_n163(key string, val string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n163
+PREHOOK: Output: default@t1_n163
 POSTHOOK: query: create table T1_n163(key string, val string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n163
+POSTHOOK: Output: default@t1_n163
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n163
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -31,11 +31,11 @@ POSTHOOK: Input: default@t1_n163
 PREHOOK: query: create table T2_n95(key string) partitioned by (val string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n95
+PREHOOK: Output: default@t2_n95
 POSTHOOK: query: create table T2_n95(key string) partitioned by (val string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n95
+POSTHOOK: Output: default@t2_n95
 PREHOOK: query: insert overwrite table T2_n95 partition (val) select key, val from T1_n163
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1_n163
diff --git ql/src/test/results/clientpositive/llap/dbtxnmgr_query5.q.out ql/src/test/results/clientpositive/llap/dbtxnmgr_query5.q.out
index ea76a37512..2938d4628e 100644
--- ql/src/test/results/clientpositive/llap/dbtxnmgr_query5.q.out
+++ ql/src/test/results/clientpositive/llap/dbtxnmgr_query5.q.out
@@ -13,11 +13,11 @@ POSTHOOK: Input: database:foo
 PREHOOK: query: create table T1_n40(key string, val string) partitioned by (ds string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:foo
-PREHOOK: Output: foo@T1_n40
+PREHOOK: Output: foo@t1_n40
 POSTHOOK: query: create table T1_n40(key string, val string) partitioned by (ds string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:foo
-POSTHOOK: Output: foo@T1_n40
+POSTHOOK: Output: foo@t1_n40
 PREHOOK: query: alter table T1_n40 add partition (ds='today')
 PREHOOK: type: ALTERTABLE_ADDPARTS
 PREHOOK: Output: foo@t1_n40
@@ -29,13 +29,13 @@ PREHOOK: query: create view V1_n3 as select key from T1_n40
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: foo@t1_n40
 PREHOOK: Output: database:foo
-PREHOOK: Output: foo@V1_n3
+PREHOOK: Output: foo@v1_n3
 POSTHOOK: query: create view V1_n3 as select key from T1_n40
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: foo@t1_n40
 POSTHOOK: Output: database:foo
-POSTHOOK: Output: foo@V1_n3
-POSTHOOK: Lineage: V1_n3.key SIMPLE [(t1_n40)t1_n40.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Output: foo@v1_n3
+POSTHOOK: Lineage: v1_n3.key SIMPLE [(t1_n40)t1_n40.FieldSchema(name:key, type:string, comment:null), ]
 PREHOOK: query: show tables
 PREHOOK: type: SHOWTABLES
 PREHOOK: Input: database:foo
diff --git ql/src/test/results/clientpositive/llap/decimal_10_0.q.out ql/src/test/results/clientpositive/llap/decimal_10_0.q.out
index fb65b1b7b2..0035f8c983 100644
--- ql/src/test/results/clientpositive/llap/decimal_10_0.q.out
+++ ql/src/test/results/clientpositive/llap/decimal_10_0.q.out
@@ -5,11 +5,11 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE `DECIMAL_n0` (`dec` decimal)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_n0
+PREHOOK: Output: default@decimal_n0
 POSTHOOK: query: CREATE TABLE `DECIMAL_n0` (`dec` decimal)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_n0
+POSTHOOK: Output: default@decimal_n0
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal_10_0.txt' OVERWRITE INTO TABLE `DECIMAL_n0`
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/decimal_3.q.out ql/src/test/results/clientpositive/llap/decimal_3.q.out
index d2e39571fd..40f7a52d4b 100644
--- ql/src/test/results/clientpositive/llap/decimal_3.q.out
+++ ql/src/test/results/clientpositive/llap/decimal_3.q.out
@@ -8,14 +8,14 @@ ROW FORMAT DELIMITED
 STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_3
+PREHOOK: Output: default@decimal_3
 POSTHOOK: query: CREATE TABLE DECIMAL_3(key decimal(38,18), value int)
 ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ' '
 STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_3
+POSTHOOK: Output: default@decimal_3
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/decimal_4.q.out ql/src/test/results/clientpositive/llap/decimal_4.q.out
index 9d3ee84f3b..6d04491bc2 100644
--- ql/src/test/results/clientpositive/llap/decimal_4.q.out
+++ ql/src/test/results/clientpositive/llap/decimal_4.q.out
@@ -12,24 +12,24 @@ ROW FORMAT DELIMITED
 STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_4_1
+PREHOOK: Output: default@decimal_4_1
 POSTHOOK: query: CREATE TABLE DECIMAL_4_1(key decimal(35,25), value int)
 ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ' '
 STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_4_1
+POSTHOOK: Output: default@decimal_4_1
 PREHOOK: query: CREATE TABLE DECIMAL_4_2(key decimal(35,25), value decimal(35,25))
 STORED AS ORC
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_4_2
+PREHOOK: Output: default@decimal_4_2
 POSTHOOK: query: CREATE TABLE DECIMAL_4_2(key decimal(35,25), value decimal(35,25))
 STORED AS ORC
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_4_2
+POSTHOOK: Output: default@decimal_4_2
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_4_1
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/decimal_5.q.out ql/src/test/results/clientpositive/llap/decimal_5.q.out
index f24588c5cb..a54a4d46db 100644
--- ql/src/test/results/clientpositive/llap/decimal_5.q.out
+++ ql/src/test/results/clientpositive/llap/decimal_5.q.out
@@ -8,14 +8,14 @@ ROW FORMAT DELIMITED
 STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_5_n0
+PREHOOK: Output: default@decimal_5_n0
 POSTHOOK: query: CREATE TABLE DECIMAL_5_n0(key decimal(10,5), value int)
 ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ' '
 STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_5_n0
+POSTHOOK: Output: default@decimal_5_n0
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_5_n0
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/decimal_6.q.out ql/src/test/results/clientpositive/llap/decimal_6.q.out
index 83cadcef8d..2be969911a 100644
--- ql/src/test/results/clientpositive/llap/decimal_6.q.out
+++ ql/src/test/results/clientpositive/llap/decimal_6.q.out
@@ -16,28 +16,28 @@ ROW FORMAT DELIMITED
 STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_6_1_n0
+PREHOOK: Output: default@decimal_6_1_n0
 POSTHOOK: query: CREATE TABLE DECIMAL_6_1_n0(key decimal(10,5), value int)
 ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ' '
 STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_6_1_n0
+POSTHOOK: Output: default@decimal_6_1_n0
 PREHOOK: query: CREATE TABLE DECIMAL_6_2_n0(key decimal(17,4), value int)
 ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ' '
 STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_6_2_n0
+PREHOOK: Output: default@decimal_6_2_n0
 POSTHOOK: query: CREATE TABLE DECIMAL_6_2_n0(key decimal(17,4), value int)
 ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ' '
 STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_6_2_n0
+POSTHOOK: Output: default@decimal_6_2_n0
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_1_n0
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -130,12 +130,12 @@ PREHOOK: query: CREATE TABLE DECIMAL_6_3_n0 AS SELECT key + 5.5 AS k, value * 11
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@decimal_6_1_n0
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_6_3_n0
+PREHOOK: Output: default@decimal_6_3_n0
 POSTHOOK: query: CREATE TABLE DECIMAL_6_3_n0 AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1_n0 ORDER BY v
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@decimal_6_1_n0
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_6_3_n0
+POSTHOOK: Output: default@decimal_6_3_n0
 POSTHOOK: Lineage: decimal_6_3_n0.k EXPRESSION [(decimal_6_1_n0)decimal_6_1_n0.FieldSchema(name:key, type:decimal(10,5), comment:null), ]
 POSTHOOK: Lineage: decimal_6_3_n0.v EXPRESSION [(decimal_6_1_n0)decimal_6_1_n0.FieldSchema(name:value, type:int, comment:null), ]
 PREHOOK: query: desc DECIMAL_6_3_n0
diff --git ql/src/test/results/clientpositive/llap/decimal_serde.q.out ql/src/test/results/clientpositive/llap/decimal_serde.q.out
index 81a4db822a..324c286bf7 100644
--- ql/src/test/results/clientpositive/llap/decimal_serde.q.out
+++ ql/src/test/results/clientpositive/llap/decimal_serde.q.out
@@ -20,14 +20,14 @@ ROW FORMAT DELIMITED
 STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_TEXT
+PREHOOK: Output: default@decimal_text
 POSTHOOK: query: CREATE TABLE DECIMAL_TEXT (key decimal, value int)
 ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ' '
 STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_TEXT
+POSTHOOK: Output: default@decimal_text
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_TEXT
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -88,14 +88,14 @@ SELECT * FROM DECIMAL_TEXT
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@decimal_text
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_RC
+PREHOOK: Output: default@decimal_rc
 POSTHOOK: query: CREATE TABLE DECIMAL_RC
 STORED AS RCFile AS
 SELECT * FROM DECIMAL_TEXT
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@decimal_text
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_RC
+POSTHOOK: Output: default@decimal_rc
 POSTHOOK: Lineage: decimal_rc.key SIMPLE [(decimal_text)decimal_text.FieldSchema(name:key, type:decimal(10,0), comment:null), ]
 POSTHOOK: Lineage: decimal_rc.value SIMPLE [(decimal_text)decimal_text.FieldSchema(name:value, type:int, comment:null), ]
 PREHOOK: query: describe formatted DECIMAL_RC
@@ -140,7 +140,7 @@ SELECT * FROM DECIMAL_RC
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@decimal_rc
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_LAZY_COL
+PREHOOK: Output: default@decimal_lazy_col
 POSTHOOK: query: CREATE TABLE DECIMAL_LAZY_COL ROW FORMAT SERDE "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"
 STORED AS RCFile AS
@@ -148,7 +148,7 @@ SELECT * FROM DECIMAL_RC
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@decimal_rc
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_LAZY_COL
+POSTHOOK: Output: default@decimal_lazy_col
 POSTHOOK: Lineage: decimal_lazy_col.key SIMPLE [(decimal_rc)decimal_rc.FieldSchema(name:key, type:decimal(10,0), comment:null), ]
 POSTHOOK: Lineage: decimal_lazy_col.value SIMPLE [(decimal_rc)decimal_rc.FieldSchema(name:value, type:int, comment:null), ]
 PREHOOK: query: describe formatted DECIMAL_LAZY_COL
@@ -196,7 +196,7 @@ SELECT * FROM DECIMAL_LAZY_COL ORDER BY key
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@decimal_lazy_col
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_SEQUENCE
+PREHOOK: Output: default@decimal_sequence
 POSTHOOK: query: CREATE TABLE DECIMAL_SEQUENCE
 ROW FORMAT DELIMITED
 FIELDS TERMINATED BY '\001'
@@ -207,7 +207,7 @@ SELECT * FROM DECIMAL_LAZY_COL ORDER BY key
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@decimal_lazy_col
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_SEQUENCE
+POSTHOOK: Output: default@decimal_sequence
 POSTHOOK: Lineage: decimal_sequence.key SIMPLE [(decimal_lazy_col)decimal_lazy_col.FieldSchema(name:key, type:decimal(10,0), comment:null), ]
 POSTHOOK: Lineage: decimal_sequence.value SIMPLE [(decimal_lazy_col)decimal_lazy_col.FieldSchema(name:value, type:int, comment:null), ]
 PREHOOK: query: SELECT * FROM DECIMAL_SEQUENCE ORDER BY key, value
diff --git ql/src/test/results/clientpositive/llap/decimal_trailing.q.out ql/src/test/results/clientpositive/llap/decimal_trailing.q.out
index 070de2b0ba..0cb622d988 100644
--- ql/src/test/results/clientpositive/llap/decimal_trailing.q.out
+++ ql/src/test/results/clientpositive/llap/decimal_trailing.q.out
@@ -12,7 +12,7 @@ ROW FORMAT DELIMITED
 STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_TRAILING_n0
+PREHOOK: Output: default@decimal_trailing_n0
 POSTHOOK: query: CREATE TABLE DECIMAL_TRAILING_n0 (
 id int,
 a decimal(10,4),
@@ -23,7 +23,7 @@ ROW FORMAT DELIMITED
 STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_TRAILING_n0
+POSTHOOK: Output: default@decimal_trailing_n0
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv10.txt' INTO TABLE DECIMAL_TRAILING_n0
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/default_constraint.q.out ql/src/test/results/clientpositive/llap/default_constraint.q.out
index a04da4e2b4..2c6bbd1417 100644
--- ql/src/test/results/clientpositive/llap/default_constraint.q.out
+++ ql/src/test/results/clientpositive/llap/default_constraint.q.out
@@ -3,13 +3,13 @@ PREHOOK: query: CREATE TABLE numericDataType_n1(a TINYINT CONSTRAINT tinyint_con
 clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@numericDataType_n1
+PREHOOK: Output: default@numericdatatype_n1
 POSTHOOK: query: CREATE TABLE numericDataType_n1(a TINYINT CONSTRAINT tinyint_constraint DEFAULT 127Y ENABLE, b SMALLINT DEFAULT 32767S, c INT DEFAULT 2147483647, d BIGINT DEFAULT 9223372036854775807L, e DOUBLE DEFAULT 3.4E38, f DECIMAL(9,2) DEFAULT 1234567.89)
 clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@numericDataType_n1
+POSTHOOK: Output: default@numericdatatype_n1
 PREHOOK: query: DESC FORMATTED numericDataType_n1
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@numericdatatype_n1
@@ -1361,13 +1361,13 @@ PREHOOK: query: CREATE TABLE numericDataType_n1(a TINYINT CONSTRAINT tinyint_con
 clustered by (b) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@numericDataType_n1
+PREHOOK: Output: default@numericdatatype_n1
 POSTHOOK: query: CREATE TABLE numericDataType_n1(a TINYINT CONSTRAINT tinyint_constraint DEFAULT 127Y ENABLE, b SMALLINT DEFAULT 32767S, c INT DEFAULT 2147483647, d BIGINT DEFAULT 9223372036854775807L, e DOUBLE DEFAULT 3.4E38, f DECIMAL(9,2) DEFAULT 1234567.89)
 clustered by (b) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@numericDataType_n1
+POSTHOOK: Output: default@numericdatatype_n1
 PREHOOK: query: ALTER TABLE numericDataType_n1 DROP CONSTRAINT tinyint_constraint
 PREHOOK: type: ALTERTABLE_DROPCONSTRAINT
 POSTHOOK: query: ALTER TABLE numericDataType_n1 DROP CONSTRAINT tinyint_constraint
@@ -2467,13 +2467,13 @@ PREHOOK: query: CREATE TABLE tablePartitioned_n0 (a STRING NOT NULL ENFORCED, ur
 PARTITIONED BY (p1 STRING, p2 INT)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@tablePartitioned_n0
+PREHOOK: Output: default@tablepartitioned_n0
 POSTHOOK: query: CREATE TABLE tablePartitioned_n0 (a STRING NOT NULL ENFORCED, url STRING constraint bdc1 default 'http://localhost', c STRING NOT NULL ENFORCED)
 PARTITIONED BY (p1 STRING, p2 INT)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@tablePartitioned_n0
+POSTHOOK: Output: default@tablepartitioned_n0
 PREHOOK: query: explain INSERT INTO tablePartitioned_n0 partition(p1='today', p2=10) values('not', 'null', 'constraint')
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
@@ -2563,13 +2563,13 @@ PREHOOK: query: CREATE TABLE numericDataType_n1(a TINYINT CONSTRAINT tinyint_con
 clustered by (b) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@numericDataType_n1
+PREHOOK: Output: default@numericdatatype_n1
 POSTHOOK: query: CREATE TABLE numericDataType_n1(a TINYINT CONSTRAINT tinyint_constraint DEFAULT 127Y ENABLE, b SMALLINT DEFAULT 32767S, c INT DEFAULT 2147483647, d BIGINT DEFAULT 9223372036854775807L, e DOUBLE DEFAULT 3.4E38, f DECIMAL(9,2) DEFAULT 1234567.89)
 clustered by (b) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@numericDataType_n1
+POSTHOOK: Output: default@numericdatatype_n1
 PREHOOK: query: ALTER TABLE numericDataType_n1 DROP CONSTRAINT tinyint_constraint
 PREHOOK: type: ALTERTABLE_DROPCONSTRAINT
 POSTHOOK: query: ALTER TABLE numericDataType_n1 DROP CONSTRAINT tinyint_constraint
diff --git ql/src/test/results/clientpositive/llap/drop_partition_with_stats.q.out ql/src/test/results/clientpositive/llap/drop_partition_with_stats.q.out
index b4366c3f04..27bff8384b 100644
--- ql/src/test/results/clientpositive/llap/drop_partition_with_stats.q.out
+++ ql/src/test/results/clientpositive/llap/drop_partition_with_stats.q.out
@@ -71,11 +71,11 @@ POSTHOOK: Output: partstatsdb1@testtable_n0@part1=p11/part2=P12
 PREHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1_n1 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:partstatsdb1
-PREHOOK: Output: partstatsdb1@TestTable1_n1
+PREHOOK: Output: partstatsdb1@testtable1_n1
 POSTHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1_n1 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:partstatsdb1
-POSTHOOK: Output: partstatsdb1@TestTable1_n1
+POSTHOOK: Output: partstatsdb1@testtable1_n1
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1_n1 PARTITION (part1='p11', Part2='P11')
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -173,11 +173,11 @@ POSTHOOK: Output: partstatsdb1@testtable1_n1@part1=p11/part2=P12
 PREHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2_n1 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:partstatsdb1
-PREHOOK: Output: partstatsdb1@TESTTABLE2_n1
+PREHOOK: Output: partstatsdb1@testtable2_n1
 POSTHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2_n1 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:partstatsdb1
-POSTHOOK: Output: partstatsdb1@TESTTABLE2_n1
+POSTHOOK: Output: partstatsdb1@testtable2_n1
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2_n1 PARTITION (part1='p11', Part2='P12')
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -298,12 +298,12 @@ POSTHOOK: type: SWITCHDATABASE
 POSTHOOK: Input: database:partstatsdb2
 PREHOOK: query: CREATE TABLE IF NOT EXISTS testtable_n0 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
 PREHOOK: type: CREATETABLE
-PREHOOK: Output: PARTSTATSDB2@testtable_n0
 PREHOOK: Output: database:partstatsdb2
+PREHOOK: Output: partstatsdb2@testtable_n0
 POSTHOOK: query: CREATE TABLE IF NOT EXISTS testtable_n0 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
 POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: PARTSTATSDB2@testtable_n0
 POSTHOOK: Output: database:partstatsdb2
+POSTHOOK: Output: partstatsdb2@testtable_n0
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable_n0 PARTITION (part1='p11', Part2='P12')
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -356,12 +356,12 @@ POSTHOOK: Output: partstatsdb2@testtable_n0
 POSTHOOK: Output: partstatsdb2@testtable_n0@part1=p11/part2=P12
 PREHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1_n1 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
 PREHOOK: type: CREATETABLE
-PREHOOK: Output: PARTSTATSDB2@TestTable1_n1
 PREHOOK: Output: database:partstatsdb2
+PREHOOK: Output: partstatsdb2@testtable1_n1
 POSTHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1_n1 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
 POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: PARTSTATSDB2@TestTable1_n1
 POSTHOOK: Output: database:partstatsdb2
+POSTHOOK: Output: partstatsdb2@testtable1_n1
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1_n1 PARTITION (part1='p11', Part2='P11')
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -458,12 +458,12 @@ POSTHOOK: Output: partstatsdb2@testtable1_n1
 POSTHOOK: Output: partstatsdb2@testtable1_n1@part1=p11/part2=P12
 PREHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2_n1 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
 PREHOOK: type: CREATETABLE
-PREHOOK: Output: PARTSTATSDB2@TESTTABLE2_n1
 PREHOOK: Output: database:partstatsdb2
+PREHOOK: Output: partstatsdb2@testtable2_n1
 POSTHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2_n1 (key STRING, value STRING) PARTITIONED BY (part1 STRING, Part2 STRING)
 POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: PARTSTATSDB2@TESTTABLE2_n1
 POSTHOOK: Output: database:partstatsdb2
+POSTHOOK: Output: partstatsdb2@testtable2_n1
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2_n1 PARTITION (part1='p11', Part2='P12')
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/drop_table_with_stats.q.out ql/src/test/results/clientpositive/llap/drop_table_with_stats.q.out
index f8290886a7..b8e39db414 100644
--- ql/src/test/results/clientpositive/llap/drop_table_with_stats.q.out
+++ ql/src/test/results/clientpositive/llap/drop_table_with_stats.q.out
@@ -39,11 +39,11 @@ POSTHOOK: Output: tblstatsdb1@testtable
 PREHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1_n0 (key STRING, value STRING)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:tblstatsdb1
-PREHOOK: Output: tblstatsdb1@TestTable1_n0
+PREHOOK: Output: tblstatsdb1@testtable1_n0
 POSTHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1_n0 (key STRING, value STRING)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:tblstatsdb1
-POSTHOOK: Output: tblstatsdb1@TestTable1_n0
+POSTHOOK: Output: tblstatsdb1@testtable1_n0
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1_n0
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -65,11 +65,11 @@ POSTHOOK: Output: tblstatsdb1@testtable1_n0
 PREHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2_n0 (key STRING, value STRING)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:tblstatsdb1
-PREHOOK: Output: tblstatsdb1@TESTTABLE2_n0
+PREHOOK: Output: tblstatsdb1@testtable2_n0
 POSTHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2_n0 (key STRING, value STRING)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:tblstatsdb1
-POSTHOOK: Output: tblstatsdb1@TESTTABLE2_n0
+POSTHOOK: Output: tblstatsdb1@testtable2_n0
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2_n0
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -134,12 +134,12 @@ POSTHOOK: type: SWITCHDATABASE
 POSTHOOK: Input: database:tblstatsdb2
 PREHOOK: query: CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING)
 PREHOOK: type: CREATETABLE
-PREHOOK: Output: TBLSTATSDB2@testtable
 PREHOOK: Output: database:tblstatsdb2
+PREHOOK: Output: tblstatsdb2@testtable
 POSTHOOK: query: CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING)
 POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: TBLSTATSDB2@testtable
 POSTHOOK: Output: database:tblstatsdb2
+POSTHOOK: Output: tblstatsdb2@testtable
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -160,12 +160,12 @@ POSTHOOK: Input: tblstatsdb2@testtable
 POSTHOOK: Output: tblstatsdb2@testtable
 PREHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1_n0 (key STRING, value STRING)
 PREHOOK: type: CREATETABLE
-PREHOOK: Output: TBLSTATSDB2@TestTable1_n0
 PREHOOK: Output: database:tblstatsdb2
+PREHOOK: Output: tblstatsdb2@testtable1_n0
 POSTHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1_n0 (key STRING, value STRING)
 POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: TBLSTATSDB2@TestTable1_n0
 POSTHOOK: Output: database:tblstatsdb2
+POSTHOOK: Output: tblstatsdb2@testtable1_n0
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1_n0
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -186,12 +186,12 @@ POSTHOOK: Input: tblstatsdb2@testtable1_n0
 POSTHOOK: Output: tblstatsdb2@testtable1_n0
 PREHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2_n0 (key STRING, value STRING)
 PREHOOK: type: CREATETABLE
-PREHOOK: Output: TBLSTATSDB2@TESTTABLE2_n0
 PREHOOK: Output: database:tblstatsdb2
+PREHOOK: Output: tblstatsdb2@testtable2_n0
 POSTHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2_n0 (key STRING, value STRING)
 POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: TBLSTATSDB2@TESTTABLE2_n0
 POSTHOOK: Output: database:tblstatsdb2
+POSTHOOK: Output: tblstatsdb2@testtable2_n0
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2_n0
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out
index 07f0fcdc90..1ea8fcfe1a 100644
--- ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out
+++ ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out
@@ -148,10 +148,10 @@ STAGE PLANS:
   Stage: Stage-4
      Create Table
        columns: ds string, date string
-        name: default.srcpart_date_n2
        input format: org.apache.hadoop.mapred.TextInputFormat
        output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
        serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        name: hive.default.srcpart_date_n2

   Stage: Stage-3
     Stats Work
diff --git ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction_3.q.out ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction_3.q.out
index 0b5808269f..c70ac08186 100644
--- ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction_3.q.out
+++ ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction_3.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: create table acidTbl(a int, b int) clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@acidTbl
+PREHOOK: Output: default@acidtbl
 POSTHOOK: query: create table acidTbl(a int, b int) clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@acidTbl
+POSTHOOK: Output: default@acidtbl
 PREHOOK: query: create table nonAcidOrcTbl(a int, b int) clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='false')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@nonAcidOrcTbl
+PREHOOK: Output: default@nonacidorctbl
 POSTHOOK: query: create table nonAcidOrcTbl(a int, b int) clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='false')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@nonAcidOrcTbl
+POSTHOOK: Output: default@nonacidorctbl
 PREHOOK: query: explain merge into acidTbl as t using nonAcidOrcTbl s ON t.a = s.a
 WHEN MATCHED AND s.a > 8 THEN DELETE
 WHEN MATCHED THEN UPDATE SET b = 7
diff --git ql/src/test/results/clientpositive/llap/dynpart_sort_optimization_acid.q.out ql/src/test/results/clientpositive/llap/dynpart_sort_optimization_acid.q.out
index ba0b22232a..0f65b3c010 100644
--- ql/src/test/results/clientpositive/llap/dynpart_sort_optimization_acid.q.out
+++ ql/src/test/results/clientpositive/llap/dynpart_sort_optimization_acid.q.out
@@ -607,11 +607,11 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE acid_2L_part(key string, value string) PARTITIONED BY(ds string, hr int) CLUSTERED BY(key) INTO 2 BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@acid_2L_part
+PREHOOK: Output: default@acid_2l_part
 POSTHOOK: query: CREATE TABLE acid_2L_part(key string, value string) PARTITIONED BY(ds string, hr int) CLUSTERED BY(key) INTO 2 BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@acid_2L_part
+POSTHOOK: Output: default@acid_2l_part
 PREHOOK: query: insert into table acid_2L_part partition(ds,hr) select * from srcpart
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
@@ -1067,11 +1067,11 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE acid_2L_part_sdpo(key string, value string) PARTITIONED BY(ds string, hr int) CLUSTERED BY(key) INTO 2 BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@acid_2L_part_sdpo
+PREHOOK: Output: default@acid_2l_part_sdpo
 POSTHOOK: query: CREATE TABLE acid_2L_part_sdpo(key string, value string) PARTITIONED BY(ds string, hr int) CLUSTERED BY(key) INTO 2 BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@acid_2L_part_sdpo
+POSTHOOK: Output: default@acid_2l_part_sdpo
 PREHOOK: query: insert into table acid_2L_part_sdpo partition(ds,hr) select * from srcpart
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
@@ -1526,11 +1526,11 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE acid_2L_part_sdpo_no_cp(key string, value string) PARTITIONED BY(ds string, hr int) CLUSTERED BY(key) INTO 2 BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@acid_2L_part_sdpo_no_cp
+PREHOOK: Output: default@acid_2l_part_sdpo_no_cp
 POSTHOOK: query: CREATE TABLE acid_2L_part_sdpo_no_cp(key string, value string) PARTITIONED BY(ds string, hr int) CLUSTERED BY(key) INTO 2 BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@acid_2L_part_sdpo_no_cp
+POSTHOOK: Output: default@acid_2l_part_sdpo_no_cp
 PREHOOK: query: insert into table acid_2L_part_sdpo_no_cp partition(ds,hr) select * from srcpart
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
diff --git ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out
index e96363fe29..ec68157c3d 100644
--- ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out
+++ ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out
@@ -3758,12 +3758,12 @@ PREHOOK: query: CREATE TABLE tablePartitioned (a STRING NOT NULL ENFORCED, b STR
 PARTITIONED BY (p1 STRING, p2 INT NOT NULL DISABLE)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@tablePartitioned
+PREHOOK: Output: default@tablepartitioned
 POSTHOOK: query: CREATE TABLE tablePartitioned (a STRING NOT NULL ENFORCED, b STRING, c STRING NOT NULL ENFORCED)
 PARTITIONED BY (p1 STRING, p2 INT NOT NULL DISABLE)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@tablePartitioned
+POSTHOOK: Output: default@tablepartitioned
 PREHOOK: query: explain INSERT INTO tablePartitioned partition(p1='today', p2=10) values('not', 'null', 'constraint')
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
diff --git ql/src/test/results/clientpositive/llap/explain_dependency.q.out ql/src/test/results/clientpositive/llap/explain_dependency.q.out
index da1036b3c7..a1e9ab8411 100644
--- ql/src/test/results/clientpositive/llap/explain_dependency.q.out
+++ ql/src/test/results/clientpositive/llap/explain_dependency.q.out
@@ -2,27 +2,27 @@ PREHOOK: query: CREATE VIEW V1_n6 AS SELECT key, value from src
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@V1_n6
+PREHOOK: Output: default@v1_n6
 POSTHOOK: query: CREATE VIEW V1_n6 AS SELECT key, value from src
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V1_n6
-POSTHOOK: Lineage: V1_n6.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V1_n6.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v1_n6
+POSTHOOK: Lineage: v1_n6.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v1_n6.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: CREATE VIEW V2_n1 AS SELECT ds, key, value FROM srcpart WHERE ds IS NOT NULL
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@srcpart
 PREHOOK: Output: database:default
-PREHOOK: Output: default@V2_n1
+PREHOOK: Output: default@v2_n1
 POSTHOOK: query: CREATE VIEW V2_n1 AS SELECT ds, key, value FROM srcpart WHERE ds IS NOT NULL
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V2_n1
-POSTHOOK: Lineage: V2_n1.ds SIMPLE [(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
-POSTHOOK: Lineage: V2_n1.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V2_n1.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v2_n1
+POSTHOOK: Lineage: v2_n1.ds SIMPLE [(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: v2_n1.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v2_n1.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: CREATE VIEW V3 AS SELECT src1.key, src2.value FROM V2_n1 src1
 JOIN src src2 ON src1.key = src2.key WHERE src1.ds IS NOT NULL
@@ -31,7 +31,7 @@ PREHOOK: Input: default@src
 PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@v2_n1
 PREHOOK: Output: database:default
-PREHOOK: Output: default@V3
+PREHOOK: Output: default@v3
 POSTHOOK: query: CREATE VIEW V3 AS SELECT src1.key, src2.value FROM V2_n1 src1
 JOIN src src2 ON src1.key = src2.key WHERE src1.ds IS NOT NULL
@@ -40,9 +40,9 @@ POSTHOOK: Input: default@src
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@v2_n1
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V3
-POSTHOOK: Lineage: V3.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V3.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v3
+POSTHOOK: Lineage: v3.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v3.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: CREATE VIEW V4 AS SELECT src1.key, src2.value as value1, src3.value as value2
 FROM V1_n6 src1 JOIN V2_n1 src2 on src1.key = src2.key JOIN src src3 ON src2.key = src3.key
@@ -52,7 +52,7 @@ PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@v1_n6
 PREHOOK: Input: default@v2_n1
 PREHOOK: Output: database:default
-PREHOOK: Output: default@V4
+PREHOOK: Output: default@v4
 POSTHOOK: query: CREATE VIEW V4 AS SELECT src1.key, src2.value as value1, src3.value as value2
 FROM V1_n6 src1 JOIN V2_n1 src2 on src1.key = src2.key JOIN src src3 ON src2.key = src3.key
@@ -62,10 +62,10 @@ POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@v1_n6
 POSTHOOK: Input: default@v2_n1
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V4
-POSTHOOK: Lineage: V4.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V4.value1 SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: V4.value2 SIMPLE [(src)src3.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v4
+POSTHOOK: Lineage: v4.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v4.value1 SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: v4.value2 SIMPLE [(src)src3.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: EXPLAIN DEPENDENCY SELECT key, count(1)
 FROM srcpart WHERE ds IS NOT NULL GROUP BY key
 PREHOOK: type: QUERY
@@ -224,16 +224,16 @@ PREHOOK: query: CREATE VIEW V5 as SELECT * FROM srcpart where ds = '10'
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@srcpart
 PREHOOK: Output: database:default
-PREHOOK: Output: default@V5
+PREHOOK: Output: default@v5
 POSTHOOK: query: CREATE VIEW V5 as SELECT * FROM srcpart where ds = '10'
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V5
-POSTHOOK: Lineage: V5.ds SIMPLE []
-POSTHOOK: Lineage: V5.hr SIMPLE [(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
-POSTHOOK: Lineage: V5.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V5.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v5
+POSTHOOK: Lineage: v5.ds SIMPLE []
+POSTHOOK: Lineage: v5.hr SIMPLE [(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: v5.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v5.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: EXPLAIN DEPENDENCY SELECT * FROM V5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
diff --git ql/src/test/results/clientpositive/llap/explainanalyze_2.q.out ql/src/test/results/clientpositive/llap/explainanalyze_2.q.out
index 36bd120028..16e59ccc59 100644
--- ql/src/test/results/clientpositive/llap/explainanalyze_2.q.out
+++ ql/src/test/results/clientpositive/llap/explainanalyze_2.q.out
@@ -1842,19 +1842,19 @@ Stage-7
 PREHOOK: query: CREATE TABLE DEST1_n105(key STRING, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n105
+PREHOOK: Output: default@dest1_n105
 POSTHOOK: query: CREATE TABLE DEST1_n105(key STRING, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n105
+POSTHOOK: Output: default@dest1_n105
 PREHOOK: query: CREATE TABLE DEST2_n29(key STRING, val1 STRING, val2 STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n29
+PREHOOK: Output: default@dest2_n29
 POSTHOOK: query: CREATE TABLE DEST2_n29(key STRING, val1 STRING, val2 STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n29
+POSTHOOK: Output: default@dest2_n29
 PREHOOK: query: FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
 UNION DISTINCT
 select s2.key as key, s2.value as value from src s2) unionsrc_n3
diff --git ql/src/test/results/clientpositive/llap/explainuser_1.q.out ql/src/test/results/clientpositive/llap/explainuser_1.q.out
index 7f0ce5a9c7..c8368c0aed 100644
--- ql/src/test/results/clientpositive/llap/explainuser_1.q.out
+++ ql/src/test/results/clientpositive/llap/explainuser_1.q.out
@@ -7,7 +7,7 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_orc_merge_test_part_n1
 Stage-0
-  Create Table{"name:":"default.src_orc_merge_test_part_n1"}
+  Create Table{"name:":"hive.default.src_orc_merge_test_part_n1"}

 PREHOOK: query: create table src_orc_merge_test_part_n1(key int, value string) partitioned by (ds string, ts string) stored as orc
 PREHOOK: type: CREATETABLE
@@ -3354,7 +3354,7 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@abcd_n1
 Stage-0
-  Create Table{"name:":"default.abcd_n1"}
+  Create Table{"name:":"hive.default.abcd_n1"}

 PREHOOK: query: create table abcd_n1 (a int, b int, c int, d int)
 PREHOOK: type: CREATETABLE
@@ -3441,7 +3441,7 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_rc_merge_test_n0
 Stage-0
-  Create Table{"name:":"default.src_rc_merge_test_n0"}
+  Create Table{"name:":"hive.default.src_rc_merge_test_n0"}

 PREHOOK: query: create table src_rc_merge_test_n0(key int, value string) stored as rcfile
 PREHOOK: type: CREATETABLE
@@ -3468,7 +3468,7 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@tgt_rc_merge_test_n0
 Stage-0
-  Create Table{"name:":"default.tgt_rc_merge_test_n0"}
+  Create Table{"name:":"hive.default.tgt_rc_merge_test_n0"}

 PREHOOK: query: create table tgt_rc_merge_test_n0(key int, value string) stored as rcfile
 PREHOOK: type: CREATETABLE
@@ -3692,32 +3692,32 @@ Stage-0
 PREHOOK: query: explain create table nzhang_Tmp_n1(a int, b string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@nzhang_Tmp_n1
+PREHOOK: Output: default@nzhang_tmp_n1
 POSTHOOK: query: explain create table nzhang_Tmp_n1(a int, b string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@nzhang_Tmp_n1
+POSTHOOK: Output: default@nzhang_tmp_n1
 Stage-0
-  Create Table{"name:":"default.nzhang_Tmp_n1"}
+  Create Table{"name:":"hive.default.nzhang_tmp_n1"}

 PREHOOK: query: create table nzhang_Tmp_n1(a int, b string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@nzhang_Tmp_n1
+PREHOOK: Output: default@nzhang_tmp_n1
 POSTHOOK: query: create table nzhang_Tmp_n1(a int, b string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@nzhang_Tmp_n1
+POSTHOOK: Output: default@nzhang_tmp_n1
 PREHOOK: query: explain create table nzhang_CTAS1_n1 as select key k, value from src sort by k, value limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@nzhang_CTAS1_n1
+PREHOOK: Output: default@nzhang_ctas1_n1
 POSTHOOK: query: explain create table nzhang_CTAS1_n1 as select key k, value from src sort by k, value limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@nzhang_CTAS1_n1
+POSTHOOK: Output: default@nzhang_ctas1_n1
 Plan not optimized by CBO because the statement has sort by with limit

 Vertex dependency in root stage
@@ -3728,7 +3728,7 @@ Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
 Stage-3
   Stats Work{}
     Stage-4
-      Create Table{"name:":"default.nzhang_CTAS1_n1"}
+      Create Table{"name:":"hive.default.nzhang_ctas1_n1"}
       Stage-0
         Move Operator
           Stage-1
@@ -3738,7 +3738,7 @@ Stage-3
             Output:["_col0","_col1"],aggregations:["compute_stats(VALUE._col0, 'hll')","compute_stats(VALUE._col2, 'hll')"]
           <-Reducer 3 [CUSTOM_SIMPLE_EDGE] llap
             File Output Operator [FS_8]
-              table:{"name:":"default.nzhang_CTAS1_n1"}
+              table:{"name:":"default.nzhang_ctas1_n1"}
              Limit [LIM_7] (rows=10 width=178)
                Number of rows:10
                Select Operator [SEL_6] (rows=10 width=178)
@@ -3771,12 +3771,12 @@ PREHOOK: query: create table nzhang_CTAS1_n1 as select key k, value from src sor
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@nzhang_CTAS1_n1
+PREHOOK: Output: default@nzhang_ctas1_n1
 POSTHOOK: query: create table nzhang_CTAS1_n1 as select key k, value from src sort by k, value limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@nzhang_CTAS1_n1
+POSTHOOK: Output: default@nzhang_ctas1_n1
 POSTHOOK: Lineage: nzhang_ctas1_n1.k SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: nzhang_ctas1_n1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: explain create table nzhang_ctas3_n1 row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" stored as RCFile as select key/2 half_key, concat(value, "_con") conb from src sort by half_key, conb limit 10
@@ -3799,7 +3799,7 @@ Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
 Stage-3
   Stats Work{}
     Stage-4
-      Create Table{"name:":"default.nzhang_ctas3_n1"}
+      Create Table{"name:":"hive.default.nzhang_ctas3_n1"}
      Stage-0
        Move Operator
          Stage-1
@@ -3868,7 +3868,7 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@acid_dtt
 Stage-0
-  Create Table{"name:":"default.acid_dtt"}
+  Create Table{"name:":"hive.default.acid_dtt"}

 PREHOOK: query: create temporary table acid_dtt(a int, b varchar(128)) clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
@@ -5458,7 +5458,7 @@ POSTHOOK: Output: default@mfgr_price_view_n3
 Plan optimized by CBO.

 Stage-1
-  Create View{"name:":"default.mfgr_price_view_n3","original text:":"select p_mfgr, p_brand, \nsum(p_retailprice) as s \nfrom part \ngroup by p_mfgr, p_brand"}
+  Create View{"name:":"hive.default.mfgr_price_view_n3","original text:":"select p_mfgr, p_brand, \nsum(p_retailprice) as s \nfrom part \ngroup by p_mfgr, p_brand"}

 PREHOOK: query: CREATE TABLE part_4_n1(
 p_mfgr STRING,
@@ -5934,35 +5934,35 @@ Stage-0
 PREHOOK: query: CREATE TABLE T1_n119(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n119
+PREHOOK: Output: default@t1_n119
 POSTHOOK: query: CREATE TABLE T1_n119(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n119
+POSTHOOK: Output: default@t1_n119
 PREHOOK: query: CREATE TABLE T2_n70(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n70
+PREHOOK: Output: default@t2_n70
 POSTHOOK: query: CREATE TABLE T2_n70(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n70
+POSTHOOK: Output: default@t2_n70
 PREHOOK: query: CREATE TABLE T3_n26(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n26
+PREHOOK: Output: default@t3_n26
 POSTHOOK: query: CREATE TABLE T3_n26(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n26
+POSTHOOK: Output: default@t3_n26
 PREHOOK: query: CREATE TABLE T4_n15(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T4_n15
+PREHOOK: Output: default@t4_n15
 POSTHOOK: query: CREATE TABLE T4_n15(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T4_n15
+POSTHOOK: Output: default@t4_n15
 PREHOOK: query: CREATE TABLE dest_j1_n16(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
diff --git ql/src/test/results/clientpositive/llap/fileformat_sequencefile.q.out ql/src/test/results/clientpositive/llap/fileformat_sequencefile.q.out
index 289a2cfea9..c61ac06c88 100644
--- ql/src/test/results/clientpositive/llap/fileformat_sequencefile.q.out
+++ ql/src/test/results/clientpositive/llap/fileformat_sequencefile.q.out
@@ -19,9 +19,9 @@ STAGE PLANS:
   Stage: Stage-0
     Create Table
       columns: key int, value string
-      name: default.dest1_n85
       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
       output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+      name: hive.default.dest1_n85

 PREHOOK: query: CREATE TABLE dest1_n85(key INT, value STRING) STORED AS
 INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
diff --git ql/src/test/results/clientpositive/llap/fileformat_text.q.out ql/src/test/results/clientpositive/llap/fileformat_text.q.out
index e660923b31..16bdb1f6c1 100644
--- ql/src/test/results/clientpositive/llap/fileformat_text.q.out
+++ ql/src/test/results/clientpositive/llap/fileformat_text.q.out
@@ -19,9 +19,9 @@ STAGE PLANS:
   Stage: Stage-0
     Create Table
       columns: key int, value string
-      name: default.dest1_n107
       input format: org.apache.hadoop.mapred.TextInputFormat
       output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+      name: hive.default.dest1_n107

 PREHOOK: query: CREATE TABLE dest1_n107(key INT, value STRING) STORED AS
 INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
diff --git ql/src/test/results/clientpositive/llap/filter_join_breaktask2.q.out ql/src/test/results/clientpositive/llap/filter_join_breaktask2.q.out
index eab45b9315..72dd105f81 100644
--- ql/src/test/results/clientpositive/llap/filter_join_breaktask2.q.out
+++ ql/src/test/results/clientpositive/llap/filter_join_breaktask2.q.out
@@ -2,36 +2,36 @@ PREHOOK: query: create table T1_n85(c1 string, c2 string, c3 string, c4 string,
 partitioned by (ds string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n85
+PREHOOK: Output: default@t1_n85
 POSTHOOK: query: create table T1_n85(c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string)
 partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n85
+POSTHOOK: Output: default@t1_n85
 PREHOOK: query: create table T2_n53(c1 string, c2 string, c3 string, c0 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string) partitioned by (ds string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n53
+PREHOOK: Output: default@t2_n53
 POSTHOOK: query: create table T2_n53(c1 string, c2 string, c3 string, c0 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string) partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n53
+POSTHOOK: Output: default@t2_n53
 PREHOOK: query: create table T3_n18 (c0 bigint, c1 bigint, c2 int) partitioned by (ds string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n18
+PREHOOK: Output: default@t3_n18
 POSTHOOK: query: create table T3_n18 (c0 bigint, c1 bigint, c2 int) partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n18
+POSTHOOK: Output: default@t3_n18
 PREHOOK: query: create table T4_n8 (c0 bigint, c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string, c26 string, c27 string, c28 string, c29 string, c30 string, c31 string, c32 string, c33 string, c34 string, c35 string, c36 string, c37 string, c38 string, c39 string, c40 string, c41 string, c42 string, c43 string, c44 string, c45 string, c46 string, c47 string, c48 string, c49 string, c50 string, c51 string, c52 string, c53 string, c54 string, c55 string, c56 string, c57 string, c58 string, c59 string, c60 string, c61 string, c62 string, c63 string, c64 string, c65 string, c66 string, c67 bigint, c68 string, c69 string, c70 bigint, c71 bigint, c72 bigint, c73 string, c74 string, c75 string, c76 string, c77 string, c78 string, c79 string, c80 string, c81 bigint, c82 bigint, c83 bigint) partitioned by (ds string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T4_n8
+PREHOOK: Output: default@t4_n8
 POSTHOOK: query: create table T4_n8 (c0 bigint, c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string, c26 string, c27 string, c28 string, c29 string, c30 string, c31 string, c32 string, c33 string, c34 string, c35 string, c36 string, c37 string, c38 string, c39 string, c40 string, c41 string, c42 string, c43 string, c44 string, c45 string, c46 string, c47 string, c48 string, c49 string, c50 string, c51 string, c52 string, c53 string, c54 string, c55 string, c56 string, c57 string, c58 string, c59 string, c60 string, c61 string, c62 string, c63 string, c64 string, c65 string, c66 string, c67 bigint, c68 string, c69 string, c70 bigint, c71 bigint, c72 bigint, c73 string, c74 string, c75 string, c76 string, c77 string, c78 string, c79 string, c80 string, c81 bigint, c82 bigint, c83 bigint) partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T4_n8
+POSTHOOK: Output: default@t4_n8
 PREHOOK: query: insert overwrite table T1_n85 partition (ds='2010-04-17') select '5', '1', '1', '1', 0, 0,4 from src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
diff --git ql/src/test/results/clientpositive/llap/groupby7.q.out ql/src/test/results/clientpositive/llap/groupby7.q.out
index 3dd3a5e588..67b6ed1877 100644
--- ql/src/test/results/clientpositive/llap/groupby7.q.out
+++ ql/src/test/results/clientpositive/llap/groupby7.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n132(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n132
+PREHOOK: Output: default@dest1_n132
 POSTHOOK: query: CREATE TABLE DEST1_n132(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n132
+POSTHOOK: Output: default@dest1_n132
 PREHOOK: query: CREATE TABLE DEST2_n34(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n34
+PREHOOK: Output: default@dest2_n34
 POSTHOOK: query: CREATE TABLE DEST2_n34(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n34
+POSTHOOK: Output: default@dest2_n34
 PREHOOK: query: FROM SRC
 INSERT OVERWRITE TABLE DEST1_n132 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
 INSERT OVERWRITE TABLE DEST2_n34 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/llap/groupby_grouping_id1.q.out ql/src/test/results/clientpositive/llap/groupby_grouping_id1.q.out
index 71fe6c9612..8fc1b7ac0a 100644
--- ql/src/test/results/clientpositive/llap/groupby_grouping_id1.q.out
+++ ql/src/test/results/clientpositive/llap/groupby_grouping_id1.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n158(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n158
+PREHOOK: Output: default@t1_n158
 POSTHOOK: query: CREATE TABLE T1_n158(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n158
+POSTHOOK: Output: default@t1_n158
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n158
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/groupby_grouping_id2.q.out ql/src/test/results/clientpositive/llap/groupby_grouping_id2.q.out
index 32d5e8efbc..b82651d730 100644
--- ql/src/test/results/clientpositive/llap/groupby_grouping_id2.q.out
+++ ql/src/test/results/clientpositive/llap/groupby_grouping_id2.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n123(key INT, value INT) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n123
+PREHOOK: Output: default@t1_n123
 POSTHOOK: query: CREATE TABLE T1_n123(key INT, value INT) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n123
+POSTHOOK: Output: default@t1_n123
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_groupingid.txt' INTO TABLE T1_n123
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/input1.q.out ql/src/test/results/clientpositive/llap/input1.q.out
index 2c41c6e8a1..63e4bc2811 100644
--- ql/src/test/results/clientpositive/llap/input1.q.out
+++ ql/src/test/results/clientpositive/llap/input1.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE TEST1_n6(A INT, B DOUBLE) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@TEST1_n6
+PREHOOK: Output: default@test1_n6
 POSTHOOK: query: CREATE TABLE TEST1_n6(A INT, B DOUBLE) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@TEST1_n6
+POSTHOOK: Output: default@test1_n6
 PREHOOK: query: EXPLAIN
 DESCRIBE TEST1_n6
 PREHOOK: type: DESCTABLE
@@ -21,7 +21,7 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Describe Table
-      table: default.TEST1_n6
+      table: default.test1_n6

   Stage: Stage-1
     Fetch Operator
diff --git ql/src/test/results/clientpositive/llap/input10.q.out ql/src/test/results/clientpositive/llap/input10.q.out
index 48af674852..50025eb027 100644
--- ql/src/test/results/clientpositive/llap/input10.q.out
+++ ql/src/test/results/clientpositive/llap/input10.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE TEST10(key INT, value STRING) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@TEST10
+PREHOOK: Output: default@test10
 POSTHOOK: query: CREATE TABLE TEST10(key INT, value STRING) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@TEST10
+POSTHOOK: Output: default@test10
 PREHOOK: query: EXPLAIN
 DESCRIBE TEST10
 PREHOOK: type: DESCTABLE
@@ -21,7 +21,7 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Describe Table
-      table: default.TEST10
+      table: default.test10

   Stage: Stage-1
     Fetch Operator
diff --git ql/src/test/results/clientpositive/llap/input15.q.out ql/src/test/results/clientpositive/llap/input15.q.out
index cd57d3dd75..0af5fedab4 100644
--- ql/src/test/results/clientpositive/llap/input15.q.out
+++ ql/src/test/results/clientpositive/llap/input15.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: EXPLAIN
 CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@TEST15
+PREHOOK: Output: default@test15
 POSTHOOK: query: EXPLAIN
 CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@TEST15
+POSTHOOK: Output: default@test15

 STAGE DEPENDENCIES:
   Stage-0 is a root stage
@@ -15,20 +15,20 @@ STAGE PLANS:
   Stage: Stage-0
     Create Table
       columns: key int, value string
-      name: default.TEST15
       field delimiter:
       input format: org.apache.hadoop.mapred.TextInputFormat
       output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
       serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      name: hive.default.test15

 PREHOOK: query: CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@TEST15
+PREHOOK: Output: default@test15
 POSTHOOK: query: CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@TEST15
+POSTHOOK: Output: default@test15
 PREHOOK: query: DESCRIBE TEST15
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@test15
diff --git ql/src/test/results/clientpositive/llap/input16.q.out ql/src/test/results/clientpositive/llap/input16.q.out
index a9b454f2a3..0fd5649c31 100644
--- ql/src/test/results/clientpositive/llap/input16.q.out
+++ ql/src/test/results/clientpositive/llap/input16.q.out
@@ -5,11 +5,11 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE INPUT16(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUT16
+PREHOOK: Output: default@input16
 POSTHOOK: query: CREATE TABLE INPUT16(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUT16
+POSTHOOK: Output: default@input16
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1_cb.txt' INTO TABLE INPUT16
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/input16_cc.q.out ql/src/test/results/clientpositive/llap/input16_cc.q.out
index 1b61e16b14..187e23ab6d 100644
--- ql/src/test/results/clientpositive/llap/input16_cc.q.out
+++ ql/src/test/results/clientpositive/llap/input16_cc.q.out
@@ -5,11 +5,11 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val') STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUT16_CC
+PREHOOK: Output: default@input16_cc
 POSTHOOK: query: CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val') STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUT16_CC
+POSTHOOK: Output: default@input16_cc
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/input2.q.out ql/src/test/results/clientpositive/llap/input2.q.out
index 28f7da4f02..1662c7d7fb 100644
--- ql/src/test/results/clientpositive/llap/input2.q.out
+++ ql/src/test/results/clientpositive/llap/input2.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE TEST2a(A INT, B DOUBLE) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@TEST2a
+PREHOOK: Output: default@test2a
 POSTHOOK: query: CREATE TABLE TEST2a(A INT, B DOUBLE) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@TEST2a
+POSTHOOK: Output: default@test2a
 PREHOOK: query: DESCRIBE TEST2a
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@test2a
@@ -25,11 +25,11 @@ b double
 PREHOOK: query: CREATE TABLE TEST2b(A ARRAY, B DOUBLE, C MAP) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@TEST2b
+PREHOOK: Output: default@test2b
 POSTHOOK: query: CREATE TABLE TEST2b(A ARRAY, B DOUBLE, C MAP) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@TEST2b
+POSTHOOK: Output: default@test2b
 PREHOOK: query: DESCRIBE TEST2b
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@test2b
diff --git ql/src/test/results/clientpositive/llap/input3.q.out ql/src/test/results/clientpositive/llap/input3.q.out
index 0365ff25ba..c8725404f5 100644
--- ql/src/test/results/clientpositive/llap/input3.q.out
+++ ql/src/test/results/clientpositive/llap/input3.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE TEST3a(A INT, B DOUBLE) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@TEST3a
+PREHOOK: Output: default@test3a
 POSTHOOK: query: CREATE TABLE TEST3a(A INT, B DOUBLE) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@TEST3a
+POSTHOOK: Output: default@test3a
 PREHOOK: query: DESCRIBE TEST3a
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@test3a
@@ -17,11 +17,11 @@ b double
 PREHOOK: query: CREATE TABLE TEST3b(A ARRAY, B DOUBLE, C MAP) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@TEST3b
+PREHOOK: Output: default@test3b
 POSTHOOK: query: CREATE TABLE TEST3b(A ARRAY, B DOUBLE, C MAP) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@TEST3b
+POSTHOOK: Output: default@test3b
 PREHOOK: query: DESCRIBE TEST3b
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@test3b
@@ -55,7 +55,7 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Add Columns
-      table name: default.TEST3b
+      table name: hive.default.test3b
      new columns: x double

 PREHOOK: query: ALTER TABLE TEST3b ADD COLUMNS (X DOUBLE)
@@ -92,8 +92,8 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Rename Table
-      table name: default.TEST3b
-      new table name: default.TEST3c
+      table name: hive.default.test3b
+      new table name: hive.default.test3c

 PREHOOK: query: ALTER TABLE TEST3b RENAME TO TEST3c
 PREHOOK: type: ALTERTABLE_RENAME
@@ -102,8 +102,8 @@ PREHOOK: Output: default@test3b
 POSTHOOK: query: ALTER TABLE TEST3b RENAME TO TEST3c
 POSTHOOK: type: ALTERTABLE_RENAME
 POSTHOOK: Input: default@test3b
-POSTHOOK: Output: default@TEST3c
 POSTHOOK: Output: default@test3b
+POSTHOOK: Output: default@test3c
 PREHOOK: query: DESCRIBE TEST3c
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@test3c
@@ -138,7 +138,7 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Replace Columns
-      table name: default.TEST3c
+      table name: hive.default.test3c
      new columns: r1 int, r2 double

 PREHOOK: query: ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 DOUBLE)
diff --git ql/src/test/results/clientpositive/llap/input4_cb_delim.q.out ql/src/test/results/clientpositive/llap/input4_cb_delim.q.out
index 97c0c1f371..a1eff784b9 100644
--- ql/src/test/results/clientpositive/llap/input4_cb_delim.q.out
+++ ql/src/test/results/clientpositive/llap/input4_cb_delim.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE INPUT4_CB(KEY STRING, VALUE STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002' LINES TERMINATED BY '\012' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUT4_CB
+PREHOOK: Output: default@input4_cb
 POSTHOOK: query: CREATE TABLE INPUT4_CB(KEY STRING, VALUE STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002' LINES TERMINATED BY '\012' STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUT4_CB
+POSTHOOK: Output: default@input4_cb
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/inputddl1.q.out ql/src/test/results/clientpositive/llap/inputddl1.q.out
index 8dff7c7072..7b790257b8 100644
--- ql/src/test/results/clientpositive/llap/inputddl1.q.out
+++ ql/src/test/results/clientpositive/llap/inputddl1.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: EXPLAIN
 CREATE TABLE INPUTDDL1(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUTDDL1
+PREHOOK: Output: default@inputddl1
 POSTHOOK: query: EXPLAIN
 CREATE TABLE INPUTDDL1(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUTDDL1
+POSTHOOK: Output: default@inputddl1

 STAGE DEPENDENCIES:
   Stage-0 is a root stage
@@ -15,19 +15,19 @@ STAGE PLANS:
   Stage: Stage-0
     Create Table
       columns: key int, value string
-      name: default.INPUTDDL1
       input format: org.apache.hadoop.mapred.TextInputFormat
       output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
       serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      name: hive.default.inputddl1

 PREHOOK: query: CREATE TABLE INPUTDDL1(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUTDDL1
+PREHOOK: Output: default@inputddl1
 POSTHOOK: query: CREATE TABLE INPUTDDL1(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUTDDL1
+POSTHOOK: Output: default@inputddl1
 PREHOOK: query: SELECT INPUTDDL1.* from INPUTDDL1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@inputddl1
diff --git ql/src/test/results/clientpositive/llap/inputddl2.q.out ql/src/test/results/clientpositive/llap/inputddl2.q.out
index 0a7e057630..52c6022d20 100644
--- ql/src/test/results/clientpositive/llap/inputddl2.q.out
+++ ql/src/test/results/clientpositive/llap/inputddl2.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: EXPLAIN
 CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUTDDL2
+PREHOOK: Output: default@inputddl2
 POSTHOOK: query: EXPLAIN
 CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUTDDL2
+POSTHOOK: Output: default@inputddl2

 STAGE DEPENDENCIES:
   Stage-0 is a root stage
@@ -15,20 +15,20 @@ STAGE PLANS:
   Stage: Stage-0
     Create Table
       columns: key int, value string
-      name: default.INPUTDDL2
       input format: org.apache.hadoop.mapred.TextInputFormat
       output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
       partition columns: ds string, country string
       serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      name: hive.default.inputddl2

 PREHOOK: query: CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUTDDL2
+PREHOOK: Output: default@inputddl2
 POSTHOOK: query: CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUTDDL2
+POSTHOOK: Output: default@inputddl2
 PREHOOK: query: DESCRIBE INPUTDDL2
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@inputddl2
diff --git ql/src/test/results/clientpositive/llap/inputddl3.q.out ql/src/test/results/clientpositive/llap/inputddl3.q.out
index bcd2b6accb..d2e908910f 100644
--- ql/src/test/results/clientpositive/llap/inputddl3.q.out
+++ ql/src/test/results/clientpositive/llap/inputddl3.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: EXPLAIN
 CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUTDDL3
+PREHOOK: Output: default@inputddl3
 POSTHOOK: query: EXPLAIN
 CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUTDDL3
+POSTHOOK: Output: default@inputddl3

 STAGE DEPENDENCIES:
   Stage-0 is a root stage
@@ -15,20 +15,20 @@ STAGE PLANS:
   Stage: Stage-0
     Create Table
       columns: key int, value string
-      name: default.INPUTDDL3
       field delimiter:
       input format: org.apache.hadoop.mapred.TextInputFormat
       output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
       serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      name: hive.default.inputddl3

 PREHOOK: query: CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUTDDL3
+PREHOOK: Output: default@inputddl3
 POSTHOOK: query: CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUTDDL3
+POSTHOOK: Output: default@inputddl3
 PREHOOK: query: DESCRIBE INPUTDDL3
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@inputddl3
diff --git ql/src/test/results/clientpositive/llap/inputddl4.q.out ql/src/test/results/clientpositive/llap/inputddl4.q.out
index 887ac6fc38..1b6ad6dad0 100644
--- ql/src/test/results/clientpositive/llap/inputddl4.q.out
+++ ql/src/test/results/clientpositive/llap/inputddl4.q.out
@@ -7,7 +7,7 @@ PREHOOK: query: CREATE TABLE INPUTDDL4(viewTime STRING, userid INT,
 CLUSTERED BY(userid) SORTED BY(viewTime) INTO 32 BUCKETS
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUTDDL4
+PREHOOK: Output: default@inputddl4
 POSTHOOK: query: CREATE TABLE INPUTDDL4(viewTime STRING, userid INT,
 page_url STRING, referrer_url STRING,
 friends ARRAY, properties MAP,
@@ -17,7 +17,7 @@ POSTHOOK: query: CREATE TABLE INPUTDDL4(viewTime STRING, userid INT,
 CLUSTERED BY(userid) SORTED BY(viewTime) INTO 32 BUCKETS
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUTDDL4
+POSTHOOK: Output: default@inputddl4
 PREHOOK: query: DESCRIBE INPUTDDL4
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@inputddl4
diff --git ql/src/test/results/clientpositive/llap/inputddl5.q.out ql/src/test/results/clientpositive/llap/inputddl5.q.out
index c39c05d226..d0abe9e9d8 100644
--- ql/src/test/results/clientpositive/llap/inputddl5.q.out
+++ ql/src/test/results/clientpositive/llap/inputddl5.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE INPUTDDL5(name STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUTDDL5
+PREHOOK: Output: default@inputddl5
 POSTHOOK: query: CREATE TABLE INPUTDDL5(name STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUTDDL5
+POSTHOOK: Output: default@inputddl5
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv4.txt' INTO TABLE INPUTDDL5
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/inputddl6.q.out ql/src/test/results/clientpositive/llap/inputddl6.q.out
index d864c82424..8211139f0b 100644
--- ql/src/test/results/clientpositive/llap/inputddl6.q.out
+++ ql/src/test/results/clientpositive/llap/inputddl6.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUTDDL6
+PREHOOK: Output: default@inputddl6
 POSTHOOK: query: CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUTDDL6
+POSTHOOK: Output: default@inputddl6
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-09')
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -92,7 +92,7 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Describe Table
-      table: default.INPUTDDL6
+      table: default.inputddl6
      partition:
        ds 2008-04-09
      extended: true
diff --git ql/src/test/results/clientpositive/llap/inputddl7.q.out ql/src/test/results/clientpositive/llap/inputddl7.q.out
index c2f9f97837..68c9423eb3 100644
--- ql/src/test/results/clientpositive/llap/inputddl7.q.out
+++ ql/src/test/results/clientpositive/llap/inputddl7.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n117(name STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n117
+PREHOOK: Output: default@t1_n117
 POSTHOOK: query: CREATE TABLE T1_n117(name STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n117
+POSTHOOK: Output: default@t1_n117
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1_n117
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -26,11 +26,11 @@ POSTHOOK: Input: default@t1_n117
 PREHOOK: query: CREATE TABLE T2_n69(name STRING) STORED AS SEQUENCEFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n69
+PREHOOK: Output: default@t2_n69
 POSTHOOK: query: CREATE TABLE T2_n69(name STRING) STORED AS SEQUENCEFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n69
+POSTHOOK: Output: default@t2_n69
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.seq' INTO TABLE T2_n69
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -51,11 +51,11 @@ POSTHOOK: Input: default@t2_n69
 PREHOOK: query: CREATE TABLE T3_n25(name STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n25
+PREHOOK: Output: default@t3_n25
 POSTHOOK: query: CREATE TABLE T3_n25(name STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n25
+POSTHOOK: Output: default@t3_n25
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T3_n25 PARTITION (ds='2008-04-09')
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -79,11 +79,11 @@ POSTHOOK: Input: default@t3_n25@ds=2008-04-09
 PREHOOK: query: CREATE TABLE T4_n14(name STRING) PARTITIONED BY(ds STRING) STORED AS SEQUENCEFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T4_n14
+PREHOOK: Output: default@t4_n14
 POSTHOOK: query: CREATE TABLE T4_n14(name STRING) PARTITIONED BY(ds STRING) STORED AS SEQUENCEFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T4_n14
+POSTHOOK: Output: default@t4_n14
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.seq' INTO TABLE T4_n14 PARTITION (ds='2008-04-09')
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/inputddl8.q.out ql/src/test/results/clientpositive/llap/inputddl8.q.out
index eb29583e81..4abc8b759a 100644
--- ql/src/test/results/clientpositive/llap/inputddl8.q.out
+++ ql/src/test/results/clientpositive/llap/inputddl8.q.out
@@ -7,7 +7,7 @@ PREHOOK: query: CREATE TABLE INPUTDDL8 COMMENT 'This is a thrift based table'
 STORED AS SEQUENCEFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUTDDL8
+PREHOOK: Output: default@inputddl8
 POSTHOOK: query: CREATE TABLE INPUTDDL8 COMMENT 'This is a thrift based table'
 PARTITIONED BY(ds STRING, country STRING)
 CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS
@@ -17,7 +17,7 @@ POSTHOOK: query: CREATE TABLE INPUTDDL8 COMMENT 'This is a thrift based table'
 STORED AS SEQUENCEFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUTDDL8
+POSTHOOK: Output: default@inputddl8
 PREHOOK: query: DESCRIBE EXTENDED INPUTDDL8
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@inputddl8
diff --git ql/src/test/results/clientpositive/llap/insert1_overwrite_partitions.q.out ql/src/test/results/clientpositive/llap/insert1_overwrite_partitions.q.out
index 09f292f936..c3f2fb5057 100644
--- ql/src/test/results/clientpositive/llap/insert1_overwrite_partitions.q.out
+++ ql/src/test/results/clientpositive/llap/insert1_overwrite_partitions.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE sourceTable (one string,two string) PARTITIONED BY (ds string,hr string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@sourceTable
+PREHOOK: Output: default@sourcetable
 POSTHOOK: query: CREATE TABLE sourceTable (one string,two string) PARTITIONED BY (ds string,hr string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@sourceTable
+POSTHOOK: Output: default@sourcetable
 PREHOOK: query: load data local inpath '../../data/files/kv1.txt' INTO TABLE sourceTable partition(ds='2011-11-11', hr='11')
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -27,11 +27,11 @@ POSTHOOK: Output: default@sourcetable@ds=2011-11-11/hr=12
 PREHOOK: query: CREATE TABLE destinTable (one string,two string) PARTITIONED BY (ds string,hr string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@destinTable
+PREHOOK: Output: default@destintable
 POSTHOOK: query: CREATE TABLE destinTable (one string,two string) PARTITIONED BY (ds string,hr string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@destinTable
+POSTHOOK: Output: default@destintable
 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE destinTable PARTITION (ds='2011-11-11', hr='11') if not exists
 SELECT one,two FROM sourceTable WHERE ds='2011-11-11' AND hr='11' order by one desc, two desc limit 5
 PREHOOK: type: QUERY
@@ -234,11 +234,11 @@ POSTHOOK: Output: default@destintable
 PREHOOK: query: CREATE TABLE destinTable (one string,two string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@destinTable
+PREHOOK: Output: default@destintable
 POSTHOOK: query: CREATE TABLE destinTable (one string,two string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@destinTable
+POSTHOOK: Output: default@destintable
 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE destinTable SELECT one,two FROM sourceTable WHERE ds='2011-11-11' AND hr='11' order by one desc, two desc limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@sourcetable
@@ -389,11 +389,11 @@ POSTHOOK: Output: default@sourcetable
 PREHOOK: query: CREATE TABLE sourceTable (one string,two string) PARTITIONED BY (DS string,HR string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@sourceTable
+PREHOOK: Output: default@sourcetable
 POSTHOOK: query: CREATE TABLE sourceTable (one string,two string) PARTITIONED BY (DS string,HR string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@sourceTable
+POSTHOOK: Output: default@sourcetable
 PREHOOK: query: load data local inpath '../../data/files/kv1.txt' INTO TABLE sourceTable partition(DS='2011-11-11', HR='11')
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -406,11 +406,11 @@ POSTHOOK: Output: default@sourcetable@ds=2011-11-11/hr=11
 PREHOOK: query: CREATE TABLE destinTable (one string,two string) PARTITIONED BY (DS string,HR string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@destinTable
+PREHOOK: Output: default@destintable
 POSTHOOK: query: CREATE TABLE destinTable (one string,two string) PARTITIONED BY (DS string,HR string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@destinTable
+POSTHOOK: Output: default@destintable
 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE destinTable PARTITION (DS='2011-11-11', HR='11') if not exists
 SELECT one,two FROM sourceTable WHERE DS='2011-11-11' AND HR='11' order by one desc, two desc limit 5
 PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
index b7355fb2d2..d0a4280980 100644
--- ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
+++ ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
@@ -2603,13 +2603,13 @@ clustered by (value) into 2 buckets stored as orc
 tblproperties ("transactional"="true")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@acidTable
+PREHOOK: Output: default@acidtable
 POSTHOOK: query: create table acidTable(key int NOT NULL enable, a1 string DEFAULT 'a1', value string)
 clustered by (value) into 2 buckets stored as orc
 tblproperties ("transactional"="true")
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@acidTable
+POSTHOOK: Output: default@acidtable
 PREHOOK: query: insert into acidTable values(1, 'a10','val100')
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
@@ -3210,12 +3210,12 @@ PREHOOK: query: create table acidTable2(key int DEFAULT 404) clustered by (key)
 tblproperties ("transactional"="true")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@acidTable2
+PREHOOK: Output: default@acidtable2
 POSTHOOK: query: create table acidTable2(key int DEFAULT 404) clustered by (key) into 2 buckets stored as orc
 tblproperties ("transactional"="true")
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@acidTable2
+POSTHOOK: Output: default@acidtable2
 PREHOOK: query: explain MERGE INTO acidTable2 as t using nonacid_n1 as s ON t.key = s.key
 WHEN NOT MATCHED THEN INSERT VALUES (DEFAULT)
 PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/llap/join_array.q.out ql/src/test/results/clientpositive/llap/join_array.q.out
index 66c143afba..2868fca70f 100644
--- ql/src/test/results/clientpositive/llap/join_array.q.out
+++ ql/src/test/results/clientpositive/llap/join_array.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: create table tinyA(a bigint, b bigint) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@tinyA
+PREHOOK: Output: default@tinya
 POSTHOOK: query: create table tinyA(a bigint, b bigint) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@tinyA
+POSTHOOK: Output: default@tinya
 PREHOOK: query: create table tinyB(a bigint, bList array) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@tinyB
+PREHOOK: Output: default@tinyb
 POSTHOOK: query: create table tinyB(a bigint, bList array) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@tinyB
+POSTHOOK: Output: default@tinyb
 PREHOOK: query: load data local inpath '../../data/files/tiny_a.txt' into table tinyA
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/join_casesensitive.q.out ql/src/test/results/clientpositive/llap/join_casesensitive.q.out
index f9bc3c024a..23ee3f9466 100644
--- ql/src/test/results/clientpositive/llap/join_casesensitive.q.out
+++ ql/src/test/results/clientpositive/llap/join_casesensitive.q.out
@@ -17,11 +17,11 @@ POSTHOOK: Output: default@joinone
 PREHOOK: query: CREATE TABLE joinTwo(key1 int, key2 int, value int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@joinTwo
+PREHOOK: Output: default@jointwo
 POSTHOOK: query: CREATE TABLE joinTwo(key1 int, key2 int, value int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@joinTwo
+POSTHOOK: Output: default@jointwo
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in6.txt' INTO TABLE joinTwo
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/join_reordering_no_stats.q.out ql/src/test/results/clientpositive/llap/join_reordering_no_stats.q.out
index 4e90697fd6..37323a4ee1 100644
--- ql/src/test/results/clientpositive/llap/join_reordering_no_stats.q.out
+++ ql/src/test/results/clientpositive/llap/join_reordering_no_stats.q.out
@@ -380,12 +380,12 @@ PREHOOK: query: CREATE TABLE Employee_Part_n1(employeeID int, employeeName Strin
 row format delimited fields terminated by '|' stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@Employee_Part_n1
+PREHOOK: Output: default@employee_part_n1
 POSTHOOK: query: CREATE TABLE Employee_Part_n1(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
 row format delimited fields terminated by '|' stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@Employee_Part_n1
+POSTHOOK: Output: default@employee_part_n1
 PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee.dat" INTO TABLE Employee_Part_n1 partition(employeeSalary='2000.0', country='USA')
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/keyword_3.q.out ql/src/test/results/clientpositive/llap/keyword_3.q.out
index 067fdd2d83..2f767feade 100644
--- ql/src/test/results/clientpositive/llap/keyword_3.q.out
+++ ql/src/test/results/clientpositive/llap/keyword_3.q.out
@@ -5,11 +5,11 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: create table NULLS (LAST string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@NULLS
+PREHOOK: Output: default@nulls
 POSTHOOK: query: create table NULLS (LAST string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@NULLS
+POSTHOOK: Output: default@nulls
 PREHOOK: query: insert overwrite table NULLS
 select key from src where key = '238' limit 1
 PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/llap/kryo.q.out ql/src/test/results/clientpositive/llap/kryo.q.out
index bdaa9cb160..91b56beeb2 100644
--- ql/src/test/results/clientpositive/llap/kryo.q.out
+++ ql/src/test/results/clientpositive/llap/kryo.q.out
@@ -10,7 +10,7 @@ PREHOOK: query: CREATE TABLE `DateTime` (
 STORED AS orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DateTime
+PREHOOK: Output: default@datetime
 POSTHOOK: query: CREATE TABLE `DateTime` (
 ID INT,
 `Time` STRING,
@@ -23,7 +23,7 @@ POSTHOOK: query: CREATE TABLE `DateTime` (
 STORED AS orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DateTime
+POSTHOOK: Output: default@datetime
 PREHOOK: query: explain SELECT IF(((`DateTime_`.`id` = 1) OR (`DateTime_`.`id` = 2)),IF((`DateTime_`.`id` = 1),(CASE WHEN true THEN `DateTime_`.`date_` WHEN NOT true THEN TO_DATE(`DateTime_`.`datetime`) ELSE NULL END),(CASE WHEN true THEN `DateTime_`.`date_` WHEN NOT true THEN TO_DATE(`DateTime_`.`datetime`) ELSE TO_DATE(`DateTime_`.`time`) END)),IF((`DateTime_`.`id` = 3),(CASE WHEN (`DateTime_`.`date_` IS NULL) OR (TO_DATE(`DateTime_`.`datetime`) IS NULL) THEN NULL WHEN `DateTime_`.`date_` > TO_DATE(`DateTime_`.`datetime`) THEN `DateTime_`.`date_` ELSE TO_DATE(`DateTime_`.`datetime`) END),NULL)) AS `none_calculation2_ok`, `DateTime_`.`id` AS `none_id_ok`, (CASE WHEN (`DateTime_`.`id` = 6) THEN (CASE WHEN (CONCAT(`DateTime_`.`date_`, ' 00:00:00') IS NULL) OR (`DateTime_`.`datetime` IS NULL) THEN NULL WHEN CONCAT(`DateTime_`.`date_`, ' 00:00:00') > `DateTime_`.`datetime` THEN CONCAT(`DateTime_`.`date_`, ' 00:00:00') ELSE `DateTime_`.`datetime` END) WHEN (`DateTime_`.`id` = 5) THEN (CASE WHEN (`DateTime_`.`date_` IS NULL) OR (`DateTime_`.`datetime` IS NULL) THEN NULL WHEN `DateTime_`.`date_` > `DateTime_`.`datetime` THEN `DateTime_`.`date_` ELSE `DateTime_`.`datetime` END) WHEN (`DateTime_`.`id` = 3) THEN (CASE WHEN true THEN CONCAT(`DateTime_`.`date_`, ' 00:00:00') WHEN NOT true THEN `DateTime_`.`datetime` ELSE NULL END) WHEN (`DateTime_`.`id` = 4) THEN (CASE WHEN true THEN CONCAT(`DateTime_`.`date_`, ' 00:00:00') WHEN NOT true THEN `DateTime_`.`datetime` ELSE `DateTime_`.`time` END) WHEN (`DateTime_`.`id` = 1) THEN (CASE WHEN true THEN `DateTime_`.`date_` WHEN NOT true THEN `DateTime_`.`datetime` ELSE NULL END) WHEN (`DateTime_`.`id` = 2) THEN (CASE WHEN true THEN `DateTime_`.`date_` WHEN NOT true THEN `DateTime_`.`datetime` ELSE `DateTime_`.`time` END) ELSE NULL END) AS `none_mixed_type_calcs_ok` FROM `DateTime` `DateTime_` WHERE (`DateTime_`.`id` = 1 OR `DateTime_`.`id` = 2 OR `DateTime_`.`id` = 3 OR `DateTime_`.`id` = 4 OR `DateTime_`.`id` = 5 OR `DateTime_`.`id` = 6) GROUP BY IF(((`DateTime_`.`id` = 1) OR (`DateTime_`.`id` = 2)),IF((`DateTime_`.`id` = 1),(CASE WHEN true THEN `DateTime_`.`date_` WHEN NOT true THEN TO_DATE(`DateTime_`.`datetime`) ELSE NULL END),(CASE WHEN true THEN `DateTime_`.`date_` WHEN NOT true THEN TO_DATE(`DateTime_`.`datetime`) ELSE TO_DATE(`DateTime_`.`time`) END)),IF((`DateTime_`.`id` = 3),(CASE WHEN (`DateTime_`.`date_` IS NULL) OR (TO_DATE(`DateTime_`.`datetime`) IS NULL) THEN NULL WHEN `DateTime_`.`date_` > TO_DATE(`DateTime_`.`datetime`) THEN `DateTime_`.`date_` ELSE TO_DATE(`DateTime_`.`datetime`) END),NULL)), `DateTime_`.`id`, (CASE WHEN (`DateTime_`.`id` = 6) THEN (CASE WHEN (CONCAT(`DateTime_`.`date_`, ' 00:00:00') IS NULL) OR (`DateTime_`.`datetime` IS NULL) THEN NULL WHEN CONCAT(`DateTime_`.`date_`, ' 00:00:00') > `DateTime_`.`datetime` THEN CONCAT(`DateTime_`.`date_`, ' 00:00:00') ELSE `DateTime_`.`datetime` END) WHEN (`DateTime_`.`id` = 5) THEN (CASE WHEN (`DateTime_`.`date_` IS NULL) OR (`DateTime_`.`datetime` IS NULL) THEN NULL WHEN `DateTime_`.`date_` > `DateTime_`.`datetime` THEN `DateTime_`.`date_` ELSE `DateTime_`.`datetime` END) WHEN (`DateTime_`.`id` = 3) THEN (CASE WHEN true THEN CONCAT(`DateTime_`.`date_`, ' 00:00:00') WHEN NOT true THEN `DateTime_`.`datetime` ELSE NULL END) WHEN (`DateTime_`.`id` = 4) THEN (CASE WHEN true THEN CONCAT(`DateTime_`.`date_`, ' 00:00:00') WHEN NOT true THEN `DateTime_`.`datetime` ELSE `DateTime_`.`time` END) WHEN (`DateTime_`.`id` = 1) THEN (CASE WHEN true THEN `DateTime_`.`date_` WHEN NOT true THEN `DateTime_`.`datetime` ELSE NULL END) WHEN (`DateTime_`.`id` = 2) THEN (CASE WHEN true THEN `DateTime_`.`date_` WHEN NOT true THEN `DateTime_`.`datetime` ELSE `DateTime_`.`time` END) ELSE NULL END)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@datetime
diff --git ql/src/test/results/clientpositive/llap/leftsemijoin_mr.q.out ql/src/test/results/clientpositive/llap/leftsemijoin_mr.q.out
index 0c2c2d5f97..f68ff65bf5 100644
--- ql/src/test/results/clientpositive/llap/leftsemijoin_mr.q.out
+++ ql/src/test/results/clientpositive/llap/leftsemijoin_mr.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n43(key INT)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n43
+PREHOOK: Output: default@t1_n43
 POSTHOOK: query: CREATE TABLE T1_n43(key INT)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n43
+POSTHOOK: Output: default@t1_n43
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/leftsemijoin_mr_t1.txt' INTO TABLE T1_n43
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -17,11 +17,11 @@ POSTHOOK: Output: default@t1_n43
 PREHOOK: query: CREATE TABLE T2_n27(key INT)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n27
+PREHOOK: Output: default@t2_n27
 POSTHOOK: query: CREATE TABLE T2_n27(key INT)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n27
+POSTHOOK: Output: default@t2_n27
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/leftsemijoin_mr_t2.txt' INTO TABLE T2_n27
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/materialized_view_cluster.q.out ql/src/test/results/clientpositive/llap/materialized_view_cluster.q.out
index 9af6567987..adb3500f80 100644
--- ql/src/test/results/clientpositive/llap/materialized_view_cluster.q.out
+++ ql/src/test/results/clientpositive/llap/materialized_view_cluster.q.out
@@ -79,7 +79,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          name: default.cluster_mv_1
+          name: hive.default.cluster_mv_1
         Select Operator
           expressions: _col0 (type: string), _col1 (type: string)
           outputColumnNames: col1, col2
@@ -120,7 +120,7 @@ STAGE PLANS:
           columns: value string, key string
           sort columns: key string
           expanded text: SELECT `src_txn`.`value`, `src_txn`.`key` FROM `default`.`src_txn` where `src_txn`.`key` > 200 and `src_txn`.`key` < 250
-          name: default.cluster_mv_1
+          name: hive.default.cluster_mv_1
          original text: SELECT value, key FROM src_txn where key > 200 and key < 250
          rewrite enabled: true
@@ -134,7 +134,7 @@ STAGE PLANS:
   Stage: Stage-5
     Materialized View Update
-      name: default.cluster_mv_1
+      name: hive.default.cluster_mv_1
      retrieve and include: true

   Stage: Stage-0
diff --git ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_3.q.out ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_3.q.out
index 2d11b3fca1..8ce3a6fb99 100644
--- ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_3.q.out
+++ ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_3.q.out
@@ -173,7 +173,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
           serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-          name: default.cmv_mat_view
+          name: hive.default.cmv_mat_view
         Select Operator
           expressions: _col0 (type: int), _col1 (type: decimal(10,2))
           outputColumnNames: col1, col2
@@ -215,7 +215,7 @@ STAGE PLANS:
           FROM `default`.`cmv_basetable` JOIN `default`.`cmv_basetable_2` ON (`cmv_basetable`.`a` = `cmv_basetable_2`.`a`)
           WHERE `cmv_basetable_2`.`c` > 10.0 GROUP BY `cmv_basetable`.`a`, `cmv_basetable_2`.`c`
-          name: default.cmv_mat_view
+          name: hive.default.cmv_mat_view
          original text: SELECT cmv_basetable.a, cmv_basetable_2.c
          FROM cmv_basetable JOIN cmv_basetable_2 ON (cmv_basetable.a = cmv_basetable_2.a)
          WHERE cmv_basetable_2.c > 10.0
@@ -232,7 +232,7 @@ STAGE PLANS:
   Stage: Stage-5
     Materialized View Update
-      name: default.cmv_mat_view
+      name: hive.default.cmv_mat_view
      retrieve and include: true

   Stage: Stage-0
diff --git ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out
index 13d7f5a756..056de0fa81 100644
--- ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out
+++ ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out
@@ -176,7 +176,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
           serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-          name: default.cmv_mat_view_n5
+          name: hive.default.cmv_mat_view_n5
          Write Type: INSERT
         Select Operator
           expressions: _col0 (type: int), _col1 (type: decimal(10,2)), _col2 (type: bigint)
@@ -221,7 +221,7 @@ STAGE PLANS:
           FROM `default`.`cmv_basetable_n5` JOIN `default`.`cmv_basetable_2_n2` ON (`cmv_basetable_n5`.`a` = `cmv_basetable_2_n2`.`a`)
           WHERE `cmv_basetable_2_n2`.`c` > 10.0 GROUP BY `cmv_basetable_n5`.`a`, `cmv_basetable_2_n2`.`c`
-          name: default.cmv_mat_view_n5
+          name: hive.default.cmv_mat_view_n5
          original text: SELECT cmv_basetable_n5.a, cmv_basetable_2_n2.c, sum(cmv_basetable_2_n2.d)
          FROM cmv_basetable_n5 JOIN cmv_basetable_2_n2 ON (cmv_basetable_n5.a = cmv_basetable_2_n2.a)
          WHERE cmv_basetable_2_n2.c > 10.0
@@ -237,7 +237,7 @@ STAGE PLANS:
   Stage: Stage-5
     Materialized View Update
-      name: default.cmv_mat_view_n5
+      name: hive.default.cmv_mat_view_n5

   Stage: Stage-0
     Move Operator
diff --git ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_rebuild_dummy.q.out ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_rebuild_dummy.q.out
index 0a106ba0a3..8a65aba6df 100644
--- ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_rebuild_dummy.q.out
+++
ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_rebuild_dummy.q.out @@ -173,7 +173,7 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - name: default.cmv_mat_view_n1 + name: hive.default.cmv_mat_view_n1 Select Operator expressions: _col0 (type: int), _col1 (type: decimal(10,2)) outputColumnNames: col1, col2 @@ -215,7 +215,7 @@ STAGE PLANS: FROM `default`.`cmv_basetable_n1` JOIN `default`.`cmv_basetable_2_n0` ON (`cmv_basetable_n1`.`a` = `cmv_basetable_2_n0`.`a`) WHERE `cmv_basetable_2_n0`.`c` > 10.0 GROUP BY `cmv_basetable_n1`.`a`, `cmv_basetable_2_n0`.`c` - name: default.cmv_mat_view_n1 + name: hive.default.cmv_mat_view_n1 original text: SELECT cmv_basetable_n1.a, cmv_basetable_2_n0.c FROM cmv_basetable_n1 JOIN cmv_basetable_2_n0 ON (cmv_basetable_n1.a = cmv_basetable_2_n0.a) WHERE cmv_basetable_2_n0.c > 10.0 @@ -232,7 +232,7 @@ STAGE PLANS: Stage: Stage-5 Materialized View Update - name: default.cmv_mat_view_n1 + name: hive.default.cmv_mat_view_n1 retrieve and include: true Stage: Stage-0 diff --git ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out index 6cce2a45f7..69e7563003 100644 --- ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out +++ ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out @@ -173,7 +173,7 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - name: default.cmv_mat_view_n3 + name: hive.default.cmv_mat_view_n3 Select Operator expressions: _col0 (type: int), _col1 (type: decimal(10,2)) outputColumnNames: col1, col2 @@ -217,7 +217,7 @@ STAGE PLANS: FROM `default`.`cmv_basetable_n3` JOIN `default`.`cmv_basetable_2_n1` ON (`cmv_basetable_n3`.`a` = `cmv_basetable_2_n1`.`a`) WHERE `cmv_basetable_2_n1`.`c` > 10.0 GROUP BY `cmv_basetable_n3`.`a`, `cmv_basetable_2_n1`.`c` - name: default.cmv_mat_view_n3 + name: hive.default.cmv_mat_view_n3 original text: SELECT cmv_basetable_n3.a, cmv_basetable_2_n1.c FROM cmv_basetable_n3 JOIN cmv_basetable_2_n1 ON (cmv_basetable_n3.a = cmv_basetable_2_n1.a) WHERE cmv_basetable_2_n1.c > 10.0 @@ -233,7 +233,7 @@ STAGE PLANS: Stage: Stage-5 Materialized View Update - name: default.cmv_mat_view_n3 + name: hive.default.cmv_mat_view_n3 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/llap/materialized_view_distribute_sort.q.out ql/src/test/results/clientpositive/llap/materialized_view_distribute_sort.q.out index 5961735f29..5b2ad1838d 100644 --- ql/src/test/results/clientpositive/llap/materialized_view_distribute_sort.q.out +++ ql/src/test/results/clientpositive/llap/materialized_view_distribute_sort.q.out @@ -79,7 +79,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.distribute_mv_1 + name: hive.default.distribute_mv_1 Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: col1, col2 @@ -120,7 +120,7 @@ STAGE PLANS: columns: value string, key string sort columns: value string expanded text: SELECT 
`src_txn`.`value`, `src_txn`.`key` FROM `default`.`src_txn` where `src_txn`.`key` > 200 and `src_txn`.`key` < 250 - name: default.distribute_mv_1 + name: hive.default.distribute_mv_1 original text: SELECT value, key FROM src_txn where key > 200 and key < 250 rewrite enabled: true @@ -134,7 +134,7 @@ STAGE PLANS: Stage: Stage-5 Materialized View Update - name: default.distribute_mv_1 + name: hive.default.distribute_mv_1 retrieve and include: true Stage: Stage-0 @@ -357,7 +357,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.distribute_mv_2 + name: hive.default.distribute_mv_2 Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: col1, col2 @@ -398,7 +398,7 @@ STAGE PLANS: columns: key string, value string sort columns: value string, key string expanded text: SELECT `src_txn`.`key`, `src_txn`.`value` FROM `default`.`src_txn` where `src_txn`.`key` > 200 and `src_txn`.`key` < 250 - name: default.distribute_mv_2 + name: hive.default.distribute_mv_2 original text: SELECT key, value FROM src_txn where key > 200 and key < 250 rewrite enabled: true @@ -412,7 +412,7 @@ STAGE PLANS: Stage: Stage-5 Materialized View Update - name: default.distribute_mv_2 + name: hive.default.distribute_mv_2 retrieve and include: true Stage: Stage-0 @@ -883,7 +883,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.distribute_mv_3 + name: hive.default.distribute_mv_3 Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: col1, col2 @@ -926,7 +926,7 @@ STAGE PLANS: expanded text: SELECT `src_txn`.`value`, `src_txn`.`key` FROM `default`.`src_txn`, `default`.`src_txn_2` WHERE `src_txn`.`key` = `src_txn_2`.`key` AND `src_txn`.`key` > 200 AND `src_txn`.`key` < 250 - name: default.distribute_mv_3 + name: hive.default.distribute_mv_3 original text: SELECT src_txn.value, src_txn.key FROM src_txn, src_txn_2 WHERE src_txn.key = src_txn_2.key AND src_txn.key > 200 AND src_txn.key < 250 @@ -942,7 +942,7 @@ WHERE src_txn.key = src_txn_2.key Stage: Stage-5 Materialized View Update - name: default.distribute_mv_3 + name: hive.default.distribute_mv_3 retrieve and include: true Stage: Stage-0 diff --git ql/src/test/results/clientpositive/llap/materialized_view_partition_cluster.q.out ql/src/test/results/clientpositive/llap/materialized_view_partition_cluster.q.out index 25c5aedc12..61a512b795 100644 --- ql/src/test/results/clientpositive/llap/materialized_view_partition_cluster.q.out +++ ql/src/test/results/clientpositive/llap/materialized_view_partition_cluster.q.out @@ -80,7 +80,7 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - name: default.cluster_mv_1 + name: hive.default.cluster_mv_1 Select Operator expressions: _col0 (type: string), _col1 (type: string), _col2 (type: double) outputColumnNames: col1, col2, col3 @@ -130,7 +130,7 @@ STAGE PLANS: columns: value string, key string sort columns: key string expanded text: SELECT `value`, `key`, `partkey` FROM (SELECT `src_txn`.`value`, `src_txn`.`key`, `src_txn`.`key` + 100 as `partkey` FROM `default`.`src_txn` where `src_txn`.`key` > 
200 and `src_txn`.`key` < 250) `cluster_mv_1` - name: default.cluster_mv_1 + name: hive.default.cluster_mv_1 original text: SELECT value, key, key + 100 as partkey FROM src_txn where key > 200 and key < 250 rewrite enabled: true @@ -144,7 +144,7 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - name: default.cluster_mv_1 + name: hive.default.cluster_mv_1 Stage: Stage-3 Stats Work @@ -156,7 +156,7 @@ STAGE PLANS: Stage: Stage-5 Materialized View Update - name: default.cluster_mv_1 + name: hive.default.cluster_mv_1 retrieve and include: true PREHOOK: query: CREATE MATERIALIZED VIEW cluster_mv_1 PARTITIONED ON (partkey) CLUSTERED ON (key) AS diff --git ql/src/test/results/clientpositive/llap/materialized_view_partitioned.q.out ql/src/test/results/clientpositive/llap/materialized_view_partitioned.q.out index 85e22c791d..bb9262fb24 100644 --- ql/src/test/results/clientpositive/llap/materialized_view_partitioned.q.out +++ ql/src/test/results/clientpositive/llap/materialized_view_partitioned.q.out @@ -118,7 +118,7 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - name: default.partition_mv_1 + name: hive.default.partition_mv_1 Stage: Stage-2 Dependency Collection @@ -128,7 +128,7 @@ STAGE PLANS: partition columns: key string columns: value string expanded text: SELECT `value`, `key` FROM (SELECT `src_txn`.`value`, `src_txn`.`key` FROM `default`.`src_txn` where `src_txn`.`key` > 200 and `src_txn`.`key` < 250) `partition_mv_1` - name: default.partition_mv_1 + name: hive.default.partition_mv_1 original text: SELECT value, key FROM src_txn where key > 200 and key < 250 rewrite enabled: true @@ -142,7 +142,7 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - name: default.partition_mv_1 + name: hive.default.partition_mv_1 Stage: Stage-3 Stats Work @@ -154,7 +154,7 @@ STAGE PLANS: Stage: Stage-5 Materialized View Update - name: default.partition_mv_1 + name: hive.default.partition_mv_1 retrieve and include: true PREHOOK: query: CREATE MATERIALIZED VIEW partition_mv_1 PARTITIONED ON (key) AS diff --git ql/src/test/results/clientpositive/llap/materialized_view_partitioned_3.q.out ql/src/test/results/clientpositive/llap/materialized_view_partitioned_3.q.out index da6e057636..f4949089fb 100644 --- ql/src/test/results/clientpositive/llap/materialized_view_partitioned_3.q.out +++ ql/src/test/results/clientpositive/llap/materialized_view_partitioned_3.q.out @@ -118,7 +118,7 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - name: default.partition_mv_sdp + name: hive.default.partition_mv_sdp Stage: Stage-2 Dependency Collection @@ -128,7 +128,7 @@ STAGE PLANS: partition columns: key string columns: value string expanded text: SELECT `value`, `key` FROM (SELECT `src_txn`.`value`, `src_txn`.`key` FROM `default`.`src_txn` where `src_txn`.`key` > 200 and `src_txn`.`key` < 250) `partition_mv_sdp` - name: default.partition_mv_sdp + name: hive.default.partition_mv_sdp original text: SELECT value, key FROM src_txn where key > 200 and key < 250 rewrite 
enabled: true @@ -142,7 +142,7 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - name: default.partition_mv_sdp + name: hive.default.partition_mv_sdp Stage: Stage-3 Stats Work @@ -154,6 +154,6 @@ STAGE PLANS: Stage: Stage-5 Materialized View Update - name: default.partition_mv_sdp + name: hive.default.partition_mv_sdp retrieve and include: true diff --git ql/src/test/results/clientpositive/llap/metadataonly1.q.out ql/src/test/results/clientpositive/llap/metadataonly1.q.out index 54cd83ddee..d6d519c133 100644 --- ql/src/test/results/clientpositive/llap/metadataonly1.q.out +++ ql/src/test/results/clientpositive/llap/metadataonly1.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE TEST1_n12(A INT, B DOUBLE) partitioned by (ds string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TEST1_n12 +PREHOOK: Output: default@test1_n12 POSTHOOK: query: CREATE TABLE TEST1_n12(A INT, B DOUBLE) partitioned by (ds string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TEST1_n12 +POSTHOOK: Output: default@test1_n12 PREHOOK: query: explain extended select max(ds) from TEST1_n12 PREHOOK: type: QUERY PREHOOK: Input: default@test1_n12 @@ -191,6 +191,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -342,6 +343,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -493,6 +495,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -659,6 +662,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -707,6 +711,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -786,6 +791,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -834,6 +840,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -968,11 +975,11 @@ POSTHOOK: Input: default@test1_n12@ds=2 PREHOOK: query: CREATE TABLE TEST2_n8(A INT, B DOUBLE) partitioned by (ds string, hr string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TEST2_n8 +PREHOOK: Output: default@test2_n8 POSTHOOK: query: CREATE TABLE TEST2_n8(A INT, B DOUBLE) partitioned by (ds string, hr string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TEST2_n8 +POSTHOOK: Output: default@test2_n8 PREHOOK: query: alter table TEST2_n8 add partition (ds='1', hr='1') PREHOOK: type: ALTERTABLE_ADDPARTS PREHOOK: Output: default@test2_n8 @@ -1088,6 +1095,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -1136,6 +1144,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -1184,6 +1193,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -1350,6 +1360,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat 
hive column.name.delimiter , columns a,b columns.comments @@ -1399,6 +1410,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -1448,6 +1460,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -1607,6 +1620,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -1654,6 +1668,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -1872,6 +1887,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -1920,6 +1936,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -1968,6 +1985,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -2016,6 +2034,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments @@ -2064,6 +2083,7 @@ STAGE PLANS: properties: bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns a,b columns.comments diff --git ql/src/test/results/clientpositive/llap/mm_bhif.q.out ql/src/test/results/clientpositive/llap/mm_bhif.q.out index cd908657a5..eafad741b9 100644 --- ql/src/test/results/clientpositive/llap/mm_bhif.q.out +++ ql/src/test/results/clientpositive/llap/mm_bhif.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_mm(key STRING, val STRING) PARTITIONED BY (ds st CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE tblproperties ("transactional"="true", "transactional_properties"="insert_only") PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_mm +PREHOOK: Output: default@t1_mm POSTHOOK: query: CREATE TABLE T1_mm(key STRING, val STRING) PARTITIONED BY (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE tblproperties ("transactional"="true", "transactional_properties"="insert_only") POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_mm +POSTHOOK: Output: default@t1_mm PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_mm PARTITION (ds='1') PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/multiMapJoin1.q.out ql/src/test/results/clientpositive/llap/multiMapJoin1.q.out index 69a071bc61..4c550d1ccd 100644 --- ql/src/test/results/clientpositive/llap/multiMapJoin1.q.out +++ ql/src/test/results/clientpositive/llap/multiMapJoin1.q.out @@ -1,11 +1,11 @@ PREHOOK: query: create table smallTbl1(key string, value string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@smallTbl1 +PREHOOK: Output: default@smalltbl1 POSTHOOK: query: create table smallTbl1(key string, value string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@smallTbl1 +POSTHOOK: Output: default@smalltbl1 RUN: Stage-0:DDL PREHOOK: query: insert overwrite table smallTbl1 select * from src where key < 10 PREHOOK: type: QUERY @@ -24,11 +24,11 @@ RUN: Stage-3:STATS PREHOOK: query: create table smallTbl2(key string, value string) PREHOOK: type: CREATETABLE PREHOOK: Output: 
database:default -PREHOOK: Output: default@smallTbl2 +PREHOOK: Output: default@smalltbl2 POSTHOOK: query: create table smallTbl2(key string, value string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@smallTbl2 +POSTHOOK: Output: default@smalltbl2 RUN: Stage-0:DDL PREHOOK: query: insert overwrite table smallTbl2 select * from src where key < 10 PREHOOK: type: QUERY @@ -47,11 +47,11 @@ RUN: Stage-3:STATS PREHOOK: query: create table smallTbl3(key string, value string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@smallTbl3 +PREHOOK: Output: default@smalltbl3 POSTHOOK: query: create table smallTbl3(key string, value string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@smallTbl3 +POSTHOOK: Output: default@smalltbl3 RUN: Stage-0:DDL PREHOOK: query: insert overwrite table smallTbl3 select * from src where key < 10 PREHOOK: type: QUERY @@ -70,11 +70,11 @@ RUN: Stage-3:STATS PREHOOK: query: create table smallTbl4(key string, value string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@smallTbl4 +PREHOOK: Output: default@smalltbl4 POSTHOOK: query: create table smallTbl4(key string, value string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@smallTbl4 +POSTHOOK: Output: default@smalltbl4 RUN: Stage-0:DDL PREHOOK: query: insert overwrite table smallTbl4 select * from src where key < 10 PREHOOK: type: QUERY @@ -93,11 +93,11 @@ RUN: Stage-3:STATS PREHOOK: query: create table bigTbl(key string, value string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@bigTbl +PREHOOK: Output: default@bigtbl POSTHOOK: query: create table bigTbl(key string, value string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@bigTbl +POSTHOOK: Output: default@bigtbl RUN: Stage-0:DDL PREHOOK: query: insert overwrite table bigTbl select * from @@ -710,11 +710,11 @@ RUN: Stage-0:DDL PREHOOK: query: create table bigTbl(key1 string, key2 string, value string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@bigTbl +PREHOOK: Output: default@bigtbl POSTHOOK: query: create table bigTbl(key1 string, key2 string, value string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@bigTbl +POSTHOOK: Output: default@bigtbl RUN: Stage-0:DDL PREHOOK: query: insert overwrite table bigTbl select * from diff --git ql/src/test/results/clientpositive/llap/nonReservedKeyWords.q.out ql/src/test/results/clientpositive/llap/nonReservedKeyWords.q.out index 3177d4f338..65df3506ff 100644 --- ql/src/test/results/clientpositive/llap/nonReservedKeyWords.q.out +++ ql/src/test/results/clientpositive/llap/nonReservedKeyWords.q.out @@ -1,11 +1,11 @@ PREHOOK: query: explain CREATE TABLE CACHE (col STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@CACHE +PREHOOK: Output: default@cache POSTHOOK: query: explain CREATE TABLE CACHE (col STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@CACHE +POSTHOOK: Output: default@cache STAGE DEPENDENCIES: Stage-0 is a root stage @@ -13,19 +13,19 @@ STAGE PLANS: Stage: Stage-0 Create Table columns: col string - name: default.CACHE input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.cache PREHOOK: query: explain CREATE TABLE DAYOFWEEK (col STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DAYOFWEEK +PREHOOK: Output: default@dayofweek POSTHOOK: query: explain CREATE TABLE DAYOFWEEK (col STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DAYOFWEEK +POSTHOOK: Output: default@dayofweek STAGE DEPENDENCIES: Stage-0 is a root stage @@ -33,19 +33,19 @@ STAGE PLANS: Stage: Stage-0 Create Table columns: col string - name: default.DAYOFWEEK input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.dayofweek PREHOOK: query: explain CREATE TABLE VIEWS (col STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@VIEWS +PREHOOK: Output: default@views POSTHOOK: query: explain CREATE TABLE VIEWS (col STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@VIEWS +POSTHOOK: Output: default@views STAGE DEPENDENCIES: Stage-0 is a root stage @@ -53,8 +53,8 @@ STAGE PLANS: Stage: Stage-0 Create Table columns: col string - name: default.VIEWS input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.views diff --git ql/src/test/results/clientpositive/llap/parquet_array_map_emptynullvals.q.out ql/src/test/results/clientpositive/llap/parquet_array_map_emptynullvals.q.out index 46086072fb..1e2e6b4e13 100644 --- ql/src/test/results/clientpositive/llap/parquet_array_map_emptynullvals.q.out +++ ql/src/test/results/clientpositive/llap/parquet_array_map_emptynullvals.q.out @@ -13,7 +13,7 @@ mapValues map) stored as parquet PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@testSets +PREHOOK: Output: default@testsets POSTHOOK: query: create table testSets ( key string, arrayValues array, @@ -21,7 +21,7 @@ mapValues map) stored as parquet POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@testSets +POSTHOOK: Output: default@testsets PREHOOK: query: insert into table testSets select 'abcd', array(), map() from src limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -40,7 +40,7 @@ mapValues map) stored as parquet PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@testSets2 +PREHOOK: Output: default@testsets2 POSTHOOK: query: create table testSets2 ( key string, arrayValues array, @@ -48,7 +48,7 @@ mapValues map) stored as parquet POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@testSets2 +POSTHOOK: Output: default@testsets2 PREHOOK: query: insert into table testSets2 select * from testSets PREHOOK: type: QUERY PREHOOK: Input: default@testsets diff --git ql/src/test/results/clientpositive/llap/partition_ctas.q.out ql/src/test/results/clientpositive/llap/partition_ctas.q.out index 1700007677..7597b56d52 100644 --- ql/src/test/results/clientpositive/llap/partition_ctas.q.out +++ ql/src/test/results/clientpositive/llap/partition_ctas.q.out @@ -111,11 +111,11 @@ STAGE PLANS: Stage: Stage-4 Create Table columns: value string - name: default.partition_ctas_1 input format: org.apache.hadoop.mapred.TextInputFormat 
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat partition columns: key string serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.partition_ctas_1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/llap/partition_type_in_plan.q.out ql/src/test/results/clientpositive/llap/partition_type_in_plan.q.out index 6082c29f58..2c7b2b960d 100644 --- ql/src/test/results/clientpositive/llap/partition_type_in_plan.q.out +++ ql/src/test/results/clientpositive/llap/partition_type_in_plan.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE datePartTbl(col1 string) PARTITIONED BY (date_prt date) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@datePartTbl +PREHOOK: Output: default@dateparttbl POSTHOOK: query: CREATE TABLE datePartTbl(col1 string) PARTITIONED BY (date_prt date) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@datePartTbl +POSTHOOK: Output: default@dateparttbl PREHOOK: query: INSERT OVERWRITE TABLE datePartTbl PARTITION(date_prt='2014-08-09') SELECT 'col1-2014-08-09' FROM src LIMIT 1 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/llap/partition_wise_fileformat13.q.out ql/src/test/results/clientpositive/llap/partition_wise_fileformat13.q.out index 9ed53cdc7c..1c638da1b4 100644 --- ql/src/test/results/clientpositive/llap/partition_wise_fileformat13.q.out +++ ql/src/test/results/clientpositive/llap/partition_wise_fileformat13.q.out @@ -1,11 +1,11 @@ PREHOOK: query: create table T1_n16(key string, value string) partitioned by (dt string) stored as rcfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n16 +PREHOOK: Output: default@t1_n16 POSTHOOK: query: create table T1_n16(key string, value string) partitioned by (dt string) stored as rcfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n16 +POSTHOOK: Output: default@t1_n16 PREHOOK: query: alter table T1_n16 set serde 'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe' PREHOOK: type: ALTERTABLE_SERIALIZER PREHOOK: Input: default@t1_n16 @@ -53,11 +53,11 @@ POSTHOOK: Output: default@t1_n16 PREHOOK: query: create table T2_n10(key string, value string) partitioned by (dt string) stored as rcfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n10 +PREHOOK: Output: default@t2_n10 POSTHOOK: query: create table T2_n10(key string, value string) partitioned by (dt string) stored as rcfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n10 +POSTHOOK: Output: default@t2_n10 PREHOOK: query: insert overwrite table T2_n10 partition (dt='1') select * from src where key = 238 or key = 97 PREHOOK: type: QUERY PREHOOK: Input: default@src diff --git ql/src/test/results/clientpositive/llap/partition_wise_fileformat17.q.out ql/src/test/results/clientpositive/llap/partition_wise_fileformat17.q.out index 3cb6e52b9a..a0365997d2 100644 --- ql/src/test/results/clientpositive/llap/partition_wise_fileformat17.q.out +++ ql/src/test/results/clientpositive/llap/partition_wise_fileformat17.q.out @@ -5,11 +5,11 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: CREATE TABLE PW17(`USER` STRING, COMPLEXDT ARRAY) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1' PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@PW17 +PREHOOK: Output: default@pw17 
POSTHOOK: query: CREATE TABLE PW17(`USER` STRING, COMPLEXDT ARRAY) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1' POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@PW17 +POSTHOOK: Output: default@pw17 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1') PREHOOK: type: LOAD #### A masked pattern was here #### @@ -57,11 +57,11 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: CREATE TABLE PW17_2(`USER` STRING, COMPLEXDT ARRAY) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1' PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@PW17_2 +PREHOOK: Output: default@pw17_2 POSTHOOK: query: CREATE TABLE PW17_2(`USER` STRING, COMPLEXDT ARRAY) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1' POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@PW17_2 +POSTHOOK: Output: default@pw17_2 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_2 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -86,11 +86,11 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: CREATE TABLE PW17_3(`USER` STRING, COMPLEXDT ARRAY >) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3' PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@PW17_3 +PREHOOK: Output: default@pw17_3 POSTHOOK: query: CREATE TABLE PW17_3(`USER` STRING, COMPLEXDT ARRAY >) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3' POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@PW17_3 +POSTHOOK: Output: default@pw17_3 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1') PREHOOK: type: LOAD #### A masked pattern was here #### @@ -138,11 +138,11 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: CREATE TABLE PW17_4(`USER` STRING, COMPLEXDT ARRAY >) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3' PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@PW17_4 +PREHOOK: Output: default@pw17_4 POSTHOOK: query: CREATE TABLE PW17_4(`USER` STRING, COMPLEXDT ARRAY >) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3' POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@PW17_4 +POSTHOOK: Output: default@pw17_4 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_4 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/partition_wise_fileformat18.q.out ql/src/test/results/clientpositive/llap/partition_wise_fileformat18.q.out index 0fa59b6cd0..0c6cd1fa2d 100644 --- ql/src/test/results/clientpositive/llap/partition_wise_fileformat18.q.out +++ ql/src/test/results/clientpositive/llap/partition_wise_fileformat18.q.out @@ -5,11 +5,11 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: CREATE TABLE PW18(`USER` STRING, COMPLEXDT UNIONTYPE) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe5' PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@PW18 +PREHOOK: Output: default@pw18 POSTHOOK: query: CREATE TABLE PW18(`USER` STRING, COMPLEXDT UNIONTYPE) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe5' POSTHOOK: type: CREATETABLE POSTHOOK: Output: 
database:default -POSTHOOK: Output: default@PW18 +POSTHOOK: Output: default@pw18 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW18 PARTITION (YEAR='1') PREHOOK: type: LOAD #### A masked pattern was here #### @@ -49,11 +49,11 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: CREATE TABLE PW18_2(`USER` STRING, COMPLEXDT UNIONTYPE) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe5' PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@PW18_2 +PREHOOK: Output: default@pw18_2 POSTHOOK: query: CREATE TABLE PW18_2(`USER` STRING, COMPLEXDT UNIONTYPE) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe5' POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@PW18_2 +POSTHOOK: Output: default@pw18_2 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW18_2 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/progress_1.q.out ql/src/test/results/clientpositive/llap/progress_1.q.out index 077176f306..c2fd26ba52 100644 --- ql/src/test/results/clientpositive/llap/progress_1.q.out +++ ql/src/test/results/clientpositive/llap/progress_1.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE PROGRESS_1(key int, value string) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@PROGRESS_1 +PREHOOK: Output: default@progress_1 POSTHOOK: query: CREATE TABLE PROGRESS_1(key int, value string) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@PROGRESS_1 +POSTHOOK: Output: default@progress_1 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv6.txt' INTO TABLE PROGRESS_1 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/rcfile_columnar.q.out ql/src/test/results/clientpositive/llap/rcfile_columnar.q.out index 183f3d1954..bdba8b90fa 100644 --- ql/src/test/results/clientpositive/llap/rcfile_columnar.q.out +++ ql/src/test/results/clientpositive/llap/rcfile_columnar.q.out @@ -6,7 +6,7 @@ STORED AS OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@columnTable +PREHOOK: Output: default@columntable POSTHOOK: query: CREATE table columnTable (key STRING, value STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' @@ -15,7 +15,7 @@ STORED AS OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@columnTable +POSTHOOK: Output: default@columntable PREHOOK: query: FROM src INSERT OVERWRITE TABLE columnTable SELECT src.key, src.value LIMIT 10 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/llap/rcfile_lazydecompress.q.out ql/src/test/results/clientpositive/llap/rcfile_lazydecompress.q.out index 461ee38a61..416b9ef7af 100644 --- ql/src/test/results/clientpositive/llap/rcfile_lazydecompress.q.out +++ ql/src/test/results/clientpositive/llap/rcfile_lazydecompress.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE table rcfileTableLazyDecompress (key STRING, value STRING) STORED AS RCFile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@rcfileTableLazyDecompress +PREHOOK: Output: default@rcfiletablelazydecompress POSTHOOK: query: CREATE table rcfileTableLazyDecompress (key STRING, value STRING) 
STORED AS RCFile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@rcfileTableLazyDecompress +POSTHOOK: Output: default@rcfiletablelazydecompress PREHOOK: query: FROM src INSERT OVERWRITE TABLE rcfileTableLazyDecompress SELECT src.key, src.value LIMIT 10 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/llap/rcfile_union.q.out ql/src/test/results/clientpositive/llap/rcfile_union.q.out index 77ad47cf68..ac0bb70bac 100644 --- ql/src/test/results/clientpositive/llap/rcfile_union.q.out +++ ql/src/test/results/clientpositive/llap/rcfile_union.q.out @@ -4,14 +4,14 @@ ROW FORMAT SERDE STORED AS RCFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@rcfile_unionTable +PREHOOK: Output: default@rcfile_uniontable POSTHOOK: query: CREATE table rcfile_unionTable (b STRING, c STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' STORED AS RCFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@rcfile_unionTable +POSTHOOK: Output: default@rcfile_uniontable PREHOOK: query: FROM src INSERT OVERWRITE TABLE rcfile_unionTable SELECT src.key, src.value LIMIT 10 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/llap/rename_column.q.out ql/src/test/results/clientpositive/llap/rename_column.q.out index 15816bbf98..586ad7c51e 100644 --- ql/src/test/results/clientpositive/llap/rename_column.q.out +++ ql/src/test/results/clientpositive/llap/rename_column.q.out @@ -63,7 +63,7 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-0 Change Column - table name: default.kv_rename_test + table name: hive.default.kv_rename_test new column name: a2 new column type: int old column name: a1 @@ -101,7 +101,7 @@ STAGE PLANS: Stage: Stage-0 Change Column after column: b - table name: default.kv_rename_test + table name: hive.default.kv_rename_test new column name: a new column type: int old column name: a2 @@ -154,7 +154,7 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-0 Change Column - table name: default.kv_rename_test + table name: hive.default.kv_rename_test new column comment: test comment2 new column name: a2 new column type: int diff --git ql/src/test/results/clientpositive/llap/schemeAuthority3.q.out ql/src/test/results/clientpositive/llap/schemeAuthority3.q.out index b26bf42f76..6b9348af76 100644 --- ql/src/test/results/clientpositive/llap/schemeAuthority3.q.out +++ ql/src/test/results/clientpositive/llap/schemeAuthority3.q.out @@ -1,11 +1,11 @@ PREHOOK: query: create table noschemeTable(key string) partitioned by (value string, value2 string) row format delimited fields terminated by '\\t' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@noschemeTable +PREHOOK: Output: default@noschemetable POSTHOOK: query: create table noschemeTable(key string) partitioned by (value string, value2 string) row format delimited fields terminated by '\\t' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@noschemeTable +POSTHOOK: Output: default@noschemetable PREHOOK: query: insert into noschemeTable partition(value='0', value2='clusterA') select key from src where (key = 10) order by key PREHOOK: type: QUERY PREHOOK: Input: default@src diff --git ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out index 3fc0074ed4..121788a313 100644 --- 
ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out +++ ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out @@ -1379,7 +1379,7 @@ STAGE PLANS: Create View columns: key string, value string expanded text: select distinct `src`.`key`, `src`.`value` from `default`.`src` order by `src`.`key` limit 2 - name: default.sdi + name: hive.default.sdi original text: select distinct * from src order by key limit 2 PREHOOK: query: create view sdi as select distinct * from src order by key limit 2 @@ -3868,7 +3868,7 @@ STAGE PLANS: Create View columns: key string, value string expanded text: select distinct `src`.`key`, `src`.`value` from `default`.`src` order by `src`.`key` limit 2 - name: default.sdi + name: hive.default.sdi original text: select distinct * from src order by key limit 2 PREHOOK: query: create view sdi as select distinct * from src order by key limit 2 diff --git ql/src/test/results/clientpositive/llap/semijoin_hint.q.out ql/src/test/results/clientpositive/llap/semijoin_hint.q.out index 10e3ba040a..2f537cde8e 100644 --- ql/src/test/results/clientpositive/llap/semijoin_hint.q.out +++ ql/src/test/results/clientpositive/llap/semijoin_hint.q.out @@ -3308,19 +3308,19 @@ STAGE PLANS: PREHOOK: query: create table acidTbl(a int, b int) clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@acidTbl +PREHOOK: Output: default@acidtbl POSTHOOK: query: create table acidTbl(a int, b int) clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@acidTbl +POSTHOOK: Output: default@acidtbl PREHOOK: query: create table nonAcidOrcTbl(a int, b int) clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='false') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@nonAcidOrcTbl +PREHOOK: Output: default@nonacidorctbl POSTHOOK: query: create table nonAcidOrcTbl(a int, b int) clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='false') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@nonAcidOrcTbl +POSTHOOK: Output: default@nonacidorctbl PREHOOK: query: explain merge into acidTbl as t using nonAcidOrcTbl s ON t.a = s.a WHEN MATCHED AND s.a > 8 THEN DELETE WHEN MATCHED THEN UPDATE SET b = 7 diff --git ql/src/test/results/clientpositive/llap/semijoin_reddedup.q.out ql/src/test/results/clientpositive/llap/semijoin_reddedup.q.out index fcae6cae9a..f0f50389a7 100644 --- ql/src/test/results/clientpositive/llap/semijoin_reddedup.q.out +++ ql/src/test/results/clientpositive/llap/semijoin_reddedup.q.out @@ -526,10 +526,10 @@ STAGE PLANS: Stage: Stage-4 Create Table columns: c_name string, c_custkey bigint, o_orderkey bigint, o_orderdate string, o_totalprice double, _c5 double - name: tpch_test.q18_large_volume_customer_cached input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde name: org.apache.hadoop.hive.ql.io.orc.OrcSerde + name: hive.tpch_test.q18_large_volume_customer_cached table properties: transactional true transactional_properties default diff --git ql/src/test/results/clientpositive/llap/serde_opencsv.q.out ql/src/test/results/clientpositive/llap/serde_opencsv.q.out index f39ee7322c..7683af9f8c 100644 --- 
ql/src/test/results/clientpositive/llap/serde_opencsv.q.out +++ ql/src/test/results/clientpositive/llap/serde_opencsv.q.out @@ -43,7 +43,6 @@ STAGE PLANS: Stage: Stage-0 Create Table columns: words string, int1 int, tinyint1 tinyint, smallint1 smallint, bigint1 bigint, boolean1 boolean, float1 float, double1 double - name: default.serde_opencsv input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.OpenCSVSerde @@ -51,6 +50,7 @@ STAGE PLANS: escapeChar \ quoteChar ' separatorChar , + name: hive.default.serde_opencsv PREHOOK: query: CREATE TABLE serde_opencsv( words STRING, diff --git ql/src/test/results/clientpositive/llap/serde_regex.q.out ql/src/test/results/clientpositive/llap/serde_regex.q.out index dfa39c24e3..33a42bdfdf 100644 --- ql/src/test/results/clientpositive/llap/serde_regex.q.out +++ ql/src/test/results/clientpositive/llap/serde_regex.q.out @@ -43,12 +43,12 @@ STAGE PLANS: Stage: Stage-0 Create Table columns: host string, identity string, user string, time string, request string, status string, size int, referer string, agent string - name: default.serde_regex input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.RegexSerDe serde properties: input.regex ([^ ]*) ([^ ]*) ([^ ]*) (-|\[[^\]]*\]) ([^ "]*|"[^"]*") (-|[0-9]*) (-|[0-9]*)(?: ([^ "]*|"[^"]*") ([^ "]*|"[^"]*"))? + name: hive.default.serde_regex PREHOOK: query: CREATE TABLE serde_regex( host STRING, @@ -161,12 +161,12 @@ STAGE PLANS: Stage: Stage-0 Create Table columns: key decimal(38,18), value int - name: default.serde_regex1 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.RegexSerDe serde properties: input.regex ([^ ]*) ([^ ]*) + name: hive.default.serde_regex1 PREHOOK: query: CREATE TABLE serde_regex1( key decimal(38,18), diff --git ql/src/test/results/clientpositive/llap/set_tblproperties.q.out ql/src/test/results/clientpositive/llap/set_tblproperties.q.out index 7003274e82..217c397337 100644 --- ql/src/test/results/clientpositive/llap/set_tblproperties.q.out +++ ql/src/test/results/clientpositive/llap/set_tblproperties.q.out @@ -20,7 +20,7 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-0 Set Properties - table name: default.t + table name: hive.default.t properties: a x b y @@ -68,7 +68,7 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-0 Unset Properties - table name: default.t + table name: hive.default.t properties: a b diff --git ql/src/test/results/clientpositive/llap/sharedwork.q.out ql/src/test/results/clientpositive/llap/sharedwork.q.out index f8d3b4b2f5..a68ead5034 100644 --- ql/src/test/results/clientpositive/llap/sharedwork.q.out +++ ql/src/test/results/clientpositive/llap/sharedwork.q.out @@ -5,7 +5,7 @@ PREHOOK: query: create table MY_TABLE_0001 ( col_20 string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@MY_TABLE_0001 +PREHOOK: Output: default@my_table_0001 POSTHOOK: query: create table MY_TABLE_0001 ( col_1 string, col_3 timestamp, @@ -13,45 +13,45 @@ POSTHOOK: query: create table MY_TABLE_0001 ( col_20 string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@MY_TABLE_0001 +POSTHOOK: Output: default@my_table_0001 PREHOOK: query: create table MY_TABLE_0001_00 ( col_1 string, 
col_22 string, col_23 int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@MY_TABLE_0001_00 +PREHOOK: Output: default@my_table_0001_00 POSTHOOK: query: create table MY_TABLE_0001_00 ( col_1 string, col_22 string, col_23 int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@MY_TABLE_0001_00 +POSTHOOK: Output: default@my_table_0001_00 PREHOOK: query: create table MY_TABLE_0003 ( col_24 string, col_21 string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@MY_TABLE_0003 +PREHOOK: Output: default@my_table_0003 POSTHOOK: query: create table MY_TABLE_0003 ( col_24 string, col_21 string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@MY_TABLE_0003 +POSTHOOK: Output: default@my_table_0003 PREHOOK: query: create table MY_TABLE_0001_01 ( col_1 string, col_100 string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@MY_TABLE_0001_01 +PREHOOK: Output: default@my_table_0001_01 POSTHOOK: query: create table MY_TABLE_0001_01 ( col_1 string, col_100 string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@MY_TABLE_0001_01 +POSTHOOK: Output: default@my_table_0001_01 PREHOOK: query: explain extended SELECT Table__323.col_7, CAST(Table__323.col_3 AS DATE) col_3, @@ -169,6 +169,7 @@ STAGE PLANS: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_20":"true","col_3":"true","col_7":"true"}} bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns col_1,col_3,col_7,col_20 columns.comments @@ -191,6 +192,7 @@ STAGE PLANS: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_20":"true","col_3":"true","col_7":"true"}} bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns col_1,col_3,col_7,col_20 columns.comments @@ -265,6 +267,7 @@ STAGE PLANS: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_21":"true","col_24":"true"}} bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns col_24,col_21 columns.comments @@ -287,6 +290,7 @@ STAGE PLANS: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_21":"true","col_24":"true"}} bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns col_24,col_21 columns.comments @@ -344,6 +348,7 @@ STAGE PLANS: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_22":"true","col_23":"true"}} bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns col_1,col_22,col_23 columns.comments @@ -366,6 +371,7 @@ STAGE PLANS: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_22":"true","col_23":"true"}} bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns col_1,col_22,col_23 columns.comments @@ -422,6 +428,7 @@ STAGE PLANS: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_100":"true"}} bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns col_1,col_100 columns.comments @@ -444,6 +451,7 @@ STAGE PLANS: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"col_1":"true","col_100":"true"}} bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns col_1,col_100 columns.comments @@ -739,6 +747,7 @@ STAGE PLANS: COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"p_brand":"true","p_comment":"true","p_container":"true","p_mfgr":"true","p_name":"true","p_partkey":"true","p_retailprice":"true","p_size":"true","p_type":"true"}} bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment columns.comments @@ -761,6 +770,7 @@ STAGE PLANS: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"p_brand":"true","p_comment":"true","p_container":"true","p_mfgr":"true","p_name":"true","p_partkey":"true","p_retailprice":"true","p_size":"true","p_type":"true"}} bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment columns.comments @@ -809,6 +819,7 @@ STAGE PLANS: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"p_brand":"true","p_comment":"true","p_container":"true","p_mfgr":"true","p_name":"true","p_partkey":"true","p_retailprice":"true","p_size":"true","p_type":"true"}} bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment columns.comments @@ -831,6 +842,7 @@ STAGE PLANS: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"p_brand":"true","p_comment":"true","p_container":"true","p_mfgr":"true","p_name":"true","p_partkey":"true","p_retailprice":"true","p_size":"true","p_type":"true"}} bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment columns.comments @@ -901,6 +913,7 @@ STAGE PLANS: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"p_brand":"true","p_comment":"true","p_container":"true","p_mfgr":"true","p_name":"true","p_partkey":"true","p_retailprice":"true","p_size":"true","p_type":"true"}} bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment columns.comments @@ -923,6 +936,7 @@ STAGE PLANS: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"p_brand":"true","p_comment":"true","p_container":"true","p_mfgr":"true","p_name":"true","p_partkey":"true","p_retailprice":"true","p_size":"true","p_type":"true"}} bucket_count -1 bucketing_version 2 + cat hive column.name.delimiter , columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment columns.comments diff --git ql/src/test/results/clientpositive/llap/skewjoin.q.out ql/src/test/results/clientpositive/llap/skewjoin.q.out index c8b827d50b..f24e4e65f5 100644 --- ql/src/test/results/clientpositive/llap/skewjoin.q.out +++ ql/src/test/results/clientpositive/llap/skewjoin.q.out @@ -1,35 +1,35 @@ PREHOOK: query: CREATE TABLE T1_n128(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n128 +PREHOOK: Output: default@t1_n128 POSTHOOK: query: CREATE TABLE T1_n128(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n128 +POSTHOOK: Output: default@t1_n128 PREHOOK: query: CREATE TABLE T2_n76(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n76 +PREHOOK: Output: default@t2_n76 POSTHOOK: query: CREATE TABLE T2_n76(key STRING, val STRING) STORED AS 
TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n76 +POSTHOOK: Output: default@t2_n76 PREHOOK: query: CREATE TABLE T3_n30(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n30 +PREHOOK: Output: default@t3_n30 POSTHOOK: query: CREATE TABLE T3_n30(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n30 +POSTHOOK: Output: default@t3_n30 PREHOOK: query: CREATE TABLE T4_n17(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T4_n17 +PREHOOK: Output: default@t4_n17 POSTHOOK: query: CREATE TABLE T4_n17(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T4_n17 +POSTHOOK: Output: default@t4_n17 PREHOOK: query: CREATE TABLE dest_j1_n17(key INT, value STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default diff --git ql/src/test/results/clientpositive/llap/skewjoinopt15.q.out ql/src/test/results/clientpositive/llap/skewjoinopt15.q.out index 2288d79244..e0b9a890fd 100644 --- ql/src/test/results/clientpositive/llap/skewjoinopt15.q.out +++ ql/src/test/results/clientpositive/llap/skewjoinopt15.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE tmpT1_n109(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@tmpT1_n109 +PREHOOK: Output: default@tmpt1_n109 POSTHOOK: query: CREATE TABLE tmpT1_n109(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@tmpT1_n109 +POSTHOOK: Output: default@tmpt1_n109 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE tmpT1_n109 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -17,11 +17,11 @@ POSTHOOK: Output: default@tmpt1_n109 PREHOOK: query: CREATE TABLE T1_n109(key INT, val STRING) SKEWED BY (key) ON ((2)) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n109 +PREHOOK: Output: default@t1_n109 POSTHOOK: query: CREATE TABLE T1_n109(key INT, val STRING) SKEWED BY (key) ON ((2)) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n109 +POSTHOOK: Output: default@t1_n109 PREHOOK: query: INSERT OVERWRITE TABLE T1_n109 SELECT key, val FROM tmpT1_n109 PREHOOK: type: QUERY PREHOOK: Input: default@tmpt1_n109 @@ -35,11 +35,11 @@ POSTHOOK: Lineage: t1_n109.val SIMPLE [(tmpt1_n109)tmpt1_n109.FieldSchema(name:v PREHOOK: query: CREATE TABLE tmpT2_n66(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@tmpT2_n66 +PREHOOK: Output: default@tmpt2_n66 POSTHOOK: query: CREATE TABLE tmpT2_n66(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@tmpT2_n66 +POSTHOOK: Output: default@tmpt2_n66 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE tmpT2_n66 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -51,11 +51,11 @@ POSTHOOK: Output: default@tmpt2_n66 PREHOOK: query: CREATE TABLE T2_n66(key INT, val STRING) SKEWED BY (key) ON ((3)) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n66 
+PREHOOK: Output: default@t2_n66
POSTHOOK: query: CREATE TABLE T2_n66(key INT, val STRING) SKEWED BY (key) ON ((3))
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n66
+POSTHOOK: Output: default@t2_n66
PREHOOK: query: INSERT OVERWRITE TABLE T2_n66 SELECT key, val FROM tmpT2_n66
PREHOOK: type: QUERY
PREHOOK: Input: default@tmpt2_n66
diff --git ql/src/test/results/clientpositive/llap/skiphf_aggr.q.out ql/src/test/results/clientpositive/llap/skiphf_aggr.q.out
index 253eb4bf4d..c6cb73482d 100644
--- ql/src/test/results/clientpositive/llap/skiphf_aggr.q.out
+++ ql/src/test/results/clientpositive/llap/skiphf_aggr.q.out
@@ -8,14 +8,14 @@ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
TBLPROPERTIES('skip.header.line.count'='1')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@skipHTbl
+PREHOOK: Output: default@skiphtbl
POSTHOOK: query: CREATE TABLE skipHTbl (a int) PARTITIONED BY (b int)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
TBLPROPERTIES('skip.header.line.count'='1')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@skipHTbl
+POSTHOOK: Output: default@skiphtbl
PREHOOK: query: INSERT OVERWRITE TABLE skipHTbl PARTITION (b = 1) VALUES (1), (2), (3), (4)
PREHOOK: type: QUERY
PREHOOK: Input: _dummy_database@_dummy_table
@@ -163,14 +163,14 @@ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
TBLPROPERTIES('skip.footer.line.count'='1')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@skipFTbl
+PREHOOK: Output: default@skipftbl
POSTHOOK: query: CREATE TABLE skipFTbl (a int) PARTITIONED BY (b int)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
TBLPROPERTIES('skip.footer.line.count'='1')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@skipFTbl
+POSTHOOK: Output: default@skipftbl
PREHOOK: query: INSERT OVERWRITE TABLE skipFTbl PARTITION (b = 1) VALUES (1), (2), (3), (4)
PREHOOK: type: QUERY
PREHOOK: Input: _dummy_database@_dummy_table
diff --git ql/src/test/results/clientpositive/llap/sqlmerge.q.out ql/src/test/results/clientpositive/llap/sqlmerge.q.out
index 8384a6c851..4d6deea8d5 100644
--- ql/src/test/results/clientpositive/llap/sqlmerge.q.out
+++ ql/src/test/results/clientpositive/llap/sqlmerge.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table acidTbl_n0(a int, b int) clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@acidTbl_n0
+PREHOOK: Output: default@acidtbl_n0
POSTHOOK: query: create table acidTbl_n0(a int, b int) clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@acidTbl_n0
+POSTHOOK: Output: default@acidtbl_n0
PREHOOK: query: create table nonAcidOrcTbl_n0(a int, b int) clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='false')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@nonAcidOrcTbl_n0
+PREHOOK: Output: default@nonacidorctbl_n0
POSTHOOK: query: create table nonAcidOrcTbl_n0(a int, b int) clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='false')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@nonAcidOrcTbl_n0
+POSTHOOK: Output: default@nonacidorctbl_n0
PREHOOK: query: explain merge into acidTbl_n0 as t using nonAcidOrcTbl_n0 s ON t.a = s.a
WHEN MATCHED AND s.a > 8 THEN DELETE
WHEN MATCHED THEN UPDATE SET b = 7
diff --git ql/src/test/results/clientpositive/llap/subquery_notin.q.out ql/src/test/results/clientpositive/llap/subquery_notin.q.out
index 3bb3a042a0..b81463bb89 100644
--- ql/src/test/results/clientpositive/llap/subquery_notin.q.out
+++ ql/src/test/results/clientpositive/llap/subquery_notin.q.out
@@ -1488,29 +1488,29 @@ select key from src where key <'11'
PREHOOK: type: CREATEVIEW
PREHOOK: Input: default@src
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_v
+PREHOOK: Output: default@t1_v
POSTHOOK: query: create view T1_v as
select key from src where key <'11'
POSTHOOK: type: CREATEVIEW
POSTHOOK: Input: default@src
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_v
-POSTHOOK: Lineage: T1_v.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Output: default@t1_v
+POSTHOOK: Lineage: t1_v.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: create view T2_v as
select case when key > '104' then null else key end as key from T1_v
PREHOOK: type: CREATEVIEW
PREHOOK: Input: default@src
PREHOOK: Input: default@t1_v
PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_v
+PREHOOK: Output: default@t2_v
POSTHOOK: query: create view T2_v as
select case when key > '104' then null else key end as key from T1_v
POSTHOOK: type: CREATEVIEW
POSTHOOK: Input: default@src
POSTHOOK: Input: default@t1_v
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_v
-POSTHOOK: Lineage: T2_v.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Output: default@t2_v
+POSTHOOK: Lineage: t2_v.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
Warning: Shuffle Join MERGEJOIN[34][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Reducer 3' is a cross product
PREHOOK: query: explain
select *
@@ -6519,11 +6519,11 @@ POSTHOOK: Lineage: t7.j SCRIPT []
PREHOOK: query: create table fixOb(i int, j int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@fixOb
+PREHOOK: Output: default@fixob
POSTHOOK: query: create table fixOb(i int, j int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@fixOb
+POSTHOOK: Output: default@fixob
PREHOOK: query: insert into fixOb values(-1, 5), (-1, 15)
PREHOOK: type: QUERY
PREHOOK: Input: _dummy_database@_dummy_table
diff --git ql/src/test/results/clientpositive/llap/subquery_scalar.q.out ql/src/test/results/clientpositive/llap/subquery_scalar.q.out
index ad5c9f0af6..cde87ef9e9 100644
--- ql/src/test/results/clientpositive/llap/subquery_scalar.q.out
+++ ql/src/test/results/clientpositive/llap/subquery_scalar.q.out
@@ -4701,11 +4701,11 @@ POSTHOOK: Output: default@tempty_n0
PREHOOK: query: create table EMPS_n4(EMPNO int,NAME string,DEPTNO int,GENDER string,CITY string,EMPID int,AGE int,SLACKER boolean,MANAGER boolean,JOINEDAT date)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@EMPS_n4
+PREHOOK: Output: default@emps_n4
POSTHOOK: query: create table EMPS_n4(EMPNO int,NAME string,DEPTNO int,GENDER string,CITY string,EMPID int,AGE int,SLACKER boolean,MANAGER boolean,JOINEDAT date)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@EMPS_n4
+POSTHOOK: Output: default@emps_n4
PREHOOK: query: insert into EMPS_n4 values
(100,'Fred',10,NULL,NULL,30,25,true,false,'1996-08-03')
PREHOOK: type: QUERY
PREHOOK: Input: _dummy_database@_dummy_table
@@ -4799,11 +4799,11 @@ POSTHOOK: Lineage: emps_n4.slacker SCRIPT []
PREHOOK: query: create table DEPTS_n3(deptno int, name string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEPTS_n3
+PREHOOK: Output: default@depts_n3
POSTHOOK: query: create table DEPTS_n3(deptno int, name string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEPTS_n3
+POSTHOOK: Output: default@depts_n3
PREHOOK: query: insert into DEPTS_n3 values( 10,'Sales')
PREHOOK: type: QUERY
PREHOOK: Input: _dummy_database@_dummy_table
diff --git ql/src/test/results/clientpositive/llap/table_access_keys_stats.q.out ql/src/test/results/clientpositive/llap/table_access_keys_stats.q.out
index 27d04594d1..2933d44aba 100644
--- ql/src/test/results/clientpositive/llap/table_access_keys_stats.q.out
+++ ql/src/test/results/clientpositive/llap/table_access_keys_stats.q.out
@@ -1,7 +1,7 @@
PREHOOK: query: CREATE TABLE T1_n13(key STRING, val STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n13
+PREHOOK: Output: default@t1_n13
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n13
PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -9,11 +9,11 @@ PREHOOK: Output: default@t1_n13
PREHOOK: query: CREATE TABLE T2_n8(key STRING, val STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n8
+PREHOOK: Output: default@t2_n8
PREHOOK: query: CREATE TABLE T3_n4(key STRING, val STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n4
+PREHOOK: Output: default@t3_n4
PREHOOK: query: SELECT key, count(1) FROM T1_n13 GROUP BY key
PREHOOK: type: QUERY
PREHOOK: Input: default@t1_n13
diff --git ql/src/test/results/clientpositive/llap/table_set_owner.q.out ql/src/test/results/clientpositive/llap/table_set_owner.q.out
index 023cceed97..7134c03042 100644
--- ql/src/test/results/clientpositive/llap/table_set_owner.q.out
+++ ql/src/test/results/clientpositive/llap/table_set_owner.q.out
@@ -16,7 +16,7 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-0
#### A masked pattern was here ####
- table name: default.t
+ table name: hive.default.t
#### A masked pattern was here ####
Principal name: user1
diff --git ql/src/test/results/clientpositive/llap/table_storage.q.out ql/src/test/results/clientpositive/llap/table_storage.q.out
index 9837d30e2b..5964803917 100644
--- ql/src/test/results/clientpositive/llap/table_storage.q.out
+++ ql/src/test/results/clientpositive/llap/table_storage.q.out
@@ -41,7 +41,7 @@ STAGE PLANS:
Stage: Stage-0
Clustered By
bucket columns: key
- table name: default.t
+ table name: hive.default.t
number of buckets: 2
sort columns: key ASC
@@ -90,7 +90,7 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-0
Into Buckets
- table name: default.t
+ table name: hive.default.t
number of buckets: 3
PREHOOK: query: ALTER TABLE t INTO 3 BUCKETS
@@ -139,7 +139,7 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-0
Not Sorted
- table name: default.t
+ table name: hive.default.t
PREHOOK: query: ALTER TABLE t NOT SORTED
PREHOOK: type: ALTERTABLE_CLUSTER_SORT
@@ -186,7 +186,7 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-0
Not Clustered
- table name: default.t
+ table name: hive.default.t
PREHOOK: query: ALTER TABLE t NOT CLUSTERED
PREHOOK: type: ALTERTABLE_CLUSTER_SORT
@@ -230,7 +230,7 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-0
Skewed By
- table name: default.t
+ table name: hive.default.t
skewedColumnNames: key
skewedColumnValues: [a], [b]
isStoredAsDirectories: true
@@ -280,7 +280,7 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-0
#### A masked pattern was here ####
- table name: default.t
+ table name: hive.default.t
#### A masked pattern was here ####
#### A masked pattern was here ####
@@ -329,7 +329,7 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-0
Not Skewed
- table name: default.t
+ table name: hive.default.t
PREHOOK: query: ALTER TABLE t NOT SKEWED
PREHOOK: type: ALTERTABLE_SKEWED
@@ -373,7 +373,7 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-0
Set File Format
- table name: default.t
+ table name: hive.default.t
input format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
output format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat
serde name: org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe
@@ -420,7 +420,7 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-0
#### A masked pattern was here ####
- table name: default.t
+ table name: hive.default.t
#### A masked pattern was here ####
#### A masked pattern was here ####
@@ -466,7 +466,7 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-0
Set Serde
- table name: default.t
+ table name: hive.default.t
serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
PREHOOK: query: ALTER TABLE t SET SERDE "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"
@@ -511,7 +511,7 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-0
Set Serde Props
- table name: default.t
+ table name: hive.default.t
properties:
property1 value1
property2 value2
diff --git ql/src/test/results/clientpositive/llap/temp_table.q.out ql/src/test/results/clientpositive/llap/temp_table.q.out
index 98a5919403..0071bbf035 100644
--- ql/src/test/results/clientpositive/llap/temp_table.q.out
+++ ql/src/test/results/clientpositive/llap/temp_table.q.out
@@ -50,11 +50,11 @@ STAGE PLANS:
Stage: Stage-4
Create Table
columns: key string, value string
- name: default.foo
input format: org.apache.hadoop.mapred.TextInputFormat
#### A masked pattern was here ####
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.foo
isTemporary: true
Stage: Stage-3
@@ -129,11 +129,11 @@ STAGE PLANS:
Stage: Stage-4
Create Table
columns: key string, value string
- name: default.bar
input format: org.apache.hadoop.mapred.TextInputFormat
#### A masked pattern was here ####
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.bar
isTemporary: true
Stage: Stage-3
diff --git ql/src/test/results/clientpositive/llap/temp_table_alter_partition_change_col.q.out ql/src/test/results/clientpositive/llap/temp_table_alter_partition_change_col.q.out
index b7de24eb6b..c87e705347 100644
--- ql/src/test/results/clientpositive/llap/temp_table_alter_partition_change_col.q.out
+++ ql/src/test/results/clientpositive/llap/temp_table_alter_partition_change_col.q.out
@@ -947,7 +947,7 @@ PARTITIONED BY (
`partition_col` string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@alterPartTbl
+PREHOOK: Output: default@alterparttbl
POSTHOOK: query: CREATE temporary TABLE `alterPartTbl`(
col_1col_1col_1col_1col_1col_11 string,
col_1col_1col_1col_1col_1col_12 string,
@@ -1111,7 +1111,7 @@ PARTITIONED BY (
`partition_col` string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@alterPartTbl
+POSTHOOK: Output: default@alterparttbl
PREHOOK: query: alter table alterPartTbl add partition(partition_col='CCL')
PREHOOK: type: ALTERTABLE_ADDPARTS
PREHOOK: Output: default@alterparttbl
diff --git ql/src/test/results/clientpositive/llap/temp_table_names.q.out ql/src/test/results/clientpositive/llap/temp_table_names.q.out
index f8ad01a2d9..b1caa773db 100644
--- ql/src/test/results/clientpositive/llap/temp_table_names.q.out
+++ ql/src/test/results/clientpositive/llap/temp_table_names.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: create temporary table Default.Temp_Table_Names (C1 string, c2 string)
PREHOOK: type: CREATETABLE
-PREHOOK: Output: Default@Temp_Table_Names
PREHOOK: Output: database:default
+PREHOOK: Output: default@temp_table_names
POSTHOOK: query: create temporary table Default.Temp_Table_Names (C1 string, c2 string)
POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: Default@Temp_Table_Names
POSTHOOK: Output: database:default
+POSTHOOK: Output: default@temp_table_names
PREHOOK: query: show tables 'Temp_Table*'
PREHOOK: type: SHOWTABLES
PREHOOK: Input: database:default
diff --git ql/src/test/results/clientpositive/llap/temp_table_partition_type_in_plan.q.out ql/src/test/results/clientpositive/llap/temp_table_partition_type_in_plan.q.out
index 146ade6d80..1e0e239cce 100644
--- ql/src/test/results/clientpositive/llap/temp_table_partition_type_in_plan.q.out
+++ ql/src/test/results/clientpositive/llap/temp_table_partition_type_in_plan.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TEMPORARY TABLE datePartTbl_temp(col1 string) PARTITIONED BY (date_prt date)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@datePartTbl_temp
+PREHOOK: Output: default@dateparttbl_temp
POSTHOOK: query: CREATE TEMPORARY TABLE datePartTbl_temp(col1 string) PARTITIONED BY (date_prt date)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@datePartTbl_temp
+POSTHOOK: Output: default@dateparttbl_temp
PREHOOK: query: INSERT OVERWRITE TABLE datePartTbl_temp PARTITION(date_prt='2014-08-09')
SELECT 'col1-2014-08-09' FROM src LIMIT 1
PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/llap/tez_acid_union_multiinsert.q.out ql/src/test/results/clientpositive/llap/tez_acid_union_multiinsert.q.out
index d91a321c7a..192848493a 100644
--- ql/src/test/results/clientpositive/llap/tez_acid_union_multiinsert.q.out
+++ ql/src/test/results/clientpositive/llap/tez_acid_union_multiinsert.q.out
@@ -25,83 +25,83 @@ POSTHOOK: type: DROPTABLE
PREHOOK: query: CREATE TABLE DEST1_acid_1(key STRING, value STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_acid_1
+PREHOOK: Output: default@dest1_acid_1
POSTHOOK: query: CREATE TABLE DEST1_acid_1(key STRING, value STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_acid_1
+POSTHOOK: Output: default@dest1_acid_1
PREHOOK: query: CREATE TABLE DEST1_acid_2(key STRING, val1 STRING, val2 STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_acid_2
+PREHOOK: Output: default@dest1_acid_2
POSTHOOK: query: CREATE TABLE DEST1_acid_2(key STRING, val1 STRING, val2 STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_acid_2
+POSTHOOK: Output: default@dest1_acid_2
PREHOOK: query: CREATE TABLE DEST1_acid_3(key STRING, value STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_acid_3
+PREHOOK: Output: default@dest1_acid_3
POSTHOOK: query: CREATE TABLE DEST1_acid_3(key STRING, value STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_acid_3
+POSTHOOK: Output: default@dest1_acid_3
PREHOOK: query: CREATE TABLE DEST1_acid_4(key STRING, val1 STRING, val2 STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_acid_4
+PREHOOK: Output: default@dest1_acid_4
POSTHOOK: query: CREATE TABLE DEST1_acid_4(key STRING, val1 STRING, val2 STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_acid_4
+POSTHOOK: Output: default@dest1_acid_4
PREHOOK: query: CREATE TABLE DEST1_acid_5(key STRING, value STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_acid_5
+PREHOOK: Output: default@dest1_acid_5
POSTHOOK: query: CREATE TABLE DEST1_acid_5(key STRING, value STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_acid_5
+POSTHOOK: Output: default@dest1_acid_5
PREHOOK: query: CREATE TABLE DEST1_acid_6(key STRING, val1 STRING, val2 STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_acid_6
+PREHOOK: Output: default@dest1_acid_6
POSTHOOK: query: CREATE TABLE DEST1_acid_6(key STRING, val1 STRING, val2 STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_acid_6
+POSTHOOK: Output: default@dest1_acid_6
PREHOOK: query: CREATE TABLE DEST1_acid_7(key STRING, value STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_acid_7
+PREHOOK: Output: default@dest1_acid_7
POSTHOOK: query: CREATE TABLE DEST1_acid_7(key STRING, value STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_acid_7
+POSTHOOK: Output: default@dest1_acid_7
PREHOOK: query: CREATE TABLE DEST1_acid_8(key STRING, val1 STRING, val2 STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_acid_8
+PREHOOK: Output: default@dest1_acid_8
POSTHOOK: query: CREATE TABLE DEST1_acid_8(key STRING, val1 STRING, val2 STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_acid_8
+POSTHOOK: Output: default@dest1_acid_8
PREHOOK: query: CREATE TABLE DEST1_acid_9(key STRING, value STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_acid_9
+PREHOOK: Output: default@dest1_acid_9
POSTHOOK: query: CREATE TABLE DEST1_acid_9(key STRING, value STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_acid_9
+POSTHOOK: Output: default@dest1_acid_9
PREHOOK: query: CREATE TABLE DEST1_acid_10(key STRING, val1 STRING, val2 STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_acid_10
+PREHOOK: Output: default@dest1_acid_10
POSTHOOK: query: CREATE TABLE DEST1_acid_10(key STRING, val1 STRING, val2 STRING) STORED AS ORC TBLPROPERTIES('transactional'='true')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_acid_10
+POSTHOOK: Output: default@dest1_acid_10
PREHOOK: query: FROM (
select key, value from (
select 'tst1' as key, cast(count(1) as string) as value, 'tst1' as value2 from src s1
diff --git ql/src/test/results/clientpositive/llap/tez_dml.q.out ql/src/test/results/clientpositive/llap/tez_dml.q.out
index d716b63012..a3d27799f8 100644
--- ql/src/test/results/clientpositive/llap/tez_dml.q.out
+++ ql/src/test/results/clientpositive/llap/tez_dml.q.out
@@ -106,10 +106,10 @@ STAGE PLANS:
Stage: Stage-4
Create Table
columns: value string, cnt bigint
- name: default.tmp_src
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.tmp_src
Stage: Stage-3
Stats Work
diff --git ql/src/test/results/clientpositive/llap/tez_join_result_complex.q.out ql/src/test/results/clientpositive/llap/tez_join_result_complex.q.out
index fe0f101393..cdc920cc63 100644
--- ql/src/test/results/clientpositive/llap/tez_join_result_complex.q.out
+++ ql/src/test/results/clientpositive/llap/tez_join_result_complex.q.out
@@ -215,6 +215,7 @@ STAGE PLANS:
properties:
bucket_count -1
bucketing_version 2
+ cat hive
column.name.delimiter ,
columns contact_event_id,ce_create_dt,ce_end_dt,contact_type,cnctevs_cd,contact_mode,cntvnst_stts_cd,total_transfers,ce_notes
columns.comments
@@ -240,6 +241,7 @@ STAGE PLANS:
properties:
bucket_count -1
bucketing_version 2
+ cat hive
column.name.delimiter ,
columns contact_event_id,ce_create_dt,ce_end_dt,contact_type,cnctevs_cd,contact_mode,cntvnst_stts_cd,total_transfers,ce_notes
columns.comments
@@ -305,6 +307,7 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
+ cat hive
columns contact_event_id,ce_create_dt,ce_end_dt,contact_type,cnctevs_cd,contact_mode,cntvnst_stts_cd,total_transfers,ce_notes,svcrqst_id,svcrqct_cds,svcrtyp_cd,cmpltyp_cd,src,cnctmd_cd,notes
columns.types string:string:string:string:string:string:string:int:array:string:array:string:string:string:string:array
name default.ct_events1_test
@@ -328,6 +331,7 @@ STAGE PLANS:
properties:
bucket_count -1
bucketing_version 2
+ cat hive
column.name.delimiter ,
columns cnctevn_id,svcrqst_id,svcrqst_crt_dts,subject_seq_no,plan_component,cust_segment,cnctyp_cd,cnctmd_cd,cnctevs_cd,svcrtyp_cd,svrstyp_cd,cmpltyp_cd,catsrsn_cd,apealvl_cd,cnstnty_cd,svcrqst_asrqst_ind,svcrqst_rtnorig_in,svcrqst_vwasof_dt,sum_reason_cd,sum_reason,crsr_master_claim_index,svcrqct_cds,svcrqst_lupdt,crsr_lupdt,cntevsds_lupdt,ignore_me,notes
columns.comments
@@ -353,6 +357,7 @@ STAGE PLANS:
properties:
bucket_count -1
bucketing_version 2
+ cat hive
column.name.delimiter ,
columns cnctevn_id,svcrqst_id,svcrqst_crt_dts,subject_seq_no,plan_component,cust_segment,cnctyp_cd,cnctmd_cd,cnctevs_cd,svcrtyp_cd,svrstyp_cd,cmpltyp_cd,catsrsn_cd,apealvl_cd,cnstnty_cd,svcrqst_asrqst_ind,svcrqst_rtnorig_in,svcrqst_vwasof_dt,sum_reason_cd,sum_reason,crsr_master_claim_index,svcrqct_cds,svcrqst_lupdt,crsr_lupdt,cntevsds_lupdt,ignore_me,notes
columns.comments
@@ -383,10 +388,10 @@ STAGE PLANS:
Stage: Stage-4
Create Table
columns: contact_event_id string, ce_create_dt string, ce_end_dt string, contact_type string, cnctevs_cd string, contact_mode string, cntvnst_stts_cd string, total_transfers int, ce_notes array, svcrqst_id string, svcrqct_cds array, svcrtyp_cd string, cmpltyp_cd string, src string, cnctmd_cd string, notes array
- name: default.ct_events1_test
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.ct_events1_test
Stage: Stage-3
Stats Work
@@ -1220,6 +1225,7 @@ STAGE PLANS:
properties:
bucket_count -1
bucketing_version 2
+ cat hive
column.name.delimiter ,
columns contact_event_id,ce_create_dt,ce_end_dt,contact_type,cnctevs_cd,contact_mode,cntvnst_stts_cd,total_transfers,ce_notes
columns.comments
@@ -1245,6 +1251,7 @@ STAGE PLANS:
properties:
bucket_count -1
bucketing_version 2
+ cat hive
column.name.delimiter ,
columns contact_event_id,ce_create_dt,ce_end_dt,contact_type,cnctevs_cd,contact_mode,cntvnst_stts_cd,total_transfers,ce_notes
columns.comments
@@ -1310,6 +1317,7 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
+ cat hive
columns contact_event_id,ce_create_dt,ce_end_dt,contact_type,cnctevs_cd,contact_mode,cntvnst_stts_cd,total_transfers,ce_notes,svcrqst_id,svcrqct_cds,svcrtyp_cd,cmpltyp_cd,src,cnctmd_cd,notes
columns.types string:string:string:string:string:string:string:int:array:string:array:string:string:string:string:array
name default.ct_events1_test
@@ -1333,6 +1341,7 @@ STAGE PLANS:
properties:
bucket_count -1
bucketing_version 2
+ cat hive
column.name.delimiter ,
columns cnctevn_id,svcrqst_id,svcrqst_crt_dts,subject_seq_no,plan_component,cust_segment,cnctyp_cd,cnctmd_cd,cnctevs_cd,svcrtyp_cd,svrstyp_cd,cmpltyp_cd,catsrsn_cd,apealvl_cd,cnstnty_cd,svcrqst_asrqst_ind,svcrqst_rtnorig_in,svcrqst_vwasof_dt,sum_reason_cd,sum_reason,crsr_master_claim_index,svcrqct_cds,svcrqst_lupdt,crsr_lupdt,cntevsds_lupdt,ignore_me,notes
columns.comments
@@ -1358,6 +1367,7 @@ STAGE PLANS:
properties:
bucket_count -1
bucketing_version 2
+ cat hive
column.name.delimiter ,
columns cnctevn_id,svcrqst_id,svcrqst_crt_dts,subject_seq_no,plan_component,cust_segment,cnctyp_cd,cnctmd_cd,cnctevs_cd,svcrtyp_cd,svrstyp_cd,cmpltyp_cd,catsrsn_cd,apealvl_cd,cnstnty_cd,svcrqst_asrqst_ind,svcrqst_rtnorig_in,svcrqst_vwasof_dt,sum_reason_cd,sum_reason,crsr_master_claim_index,svcrqct_cds,svcrqst_lupdt,crsr_lupdt,cntevsds_lupdt,ignore_me,notes
columns.comments
@@ -1388,10 +1398,10 @@ STAGE PLANS:
Stage: Stage-4
Create Table
columns: contact_event_id string, ce_create_dt string, ce_end_dt string, contact_type string, cnctevs_cd string, contact_mode string, cntvnst_stts_cd string, total_transfers int, ce_notes array, svcrqst_id string, svcrqct_cds array, svcrtyp_cd string, cmpltyp_cd string, src string, cnctmd_cd string, notes array
- name: default.ct_events1_test
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.ct_events1_test
Stage: Stage-3
Stats Work
diff --git ql/src/test/results/clientpositive/llap/tez_union.q.out ql/src/test/results/clientpositive/llap/tez_union.q.out
index e57afcdd0d..38c8a8bc14 100644
--- ql/src/test/results/clientpositive/llap/tez_union.q.out
+++ ql/src/test/results/clientpositive/llap/tez_union.q.out
@@ -1515,11 +1515,11 @@ POSTHOOK: Output: default@ut
PREHOOK: query: create table TABLE1_n3(EMP_NAME STRING, EMP_ID INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@TABLE1_n3
+PREHOOK: Output: default@table1_n3
POSTHOOK: query: create table TABLE1_n3(EMP_NAME STRING, EMP_ID INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@TABLE1_n3
+POSTHOOK: Output: default@table1_n3
PREHOOK: query: create table table2_n1 (EMP_NAME STRING) PARTITIONED BY (EMP_ID INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
diff --git ql/src/test/results/clientpositive/llap/tez_union_multiinsert.q.out ql/src/test/results/clientpositive/llap/tez_union_multiinsert.q.out
index d5bc1790d4..d12f27300b 100644
--- ql/src/test/results/clientpositive/llap/tez_union_multiinsert.q.out
+++ ql/src/test/results/clientpositive/llap/tez_union_multiinsert.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n150(key STRING, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n150
+PREHOOK: Output: default@dest1_n150
POSTHOOK: query: CREATE TABLE DEST1_n150(key STRING, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n150
+POSTHOOK: Output: default@dest1_n150
PREHOOK: query: CREATE TABLE DEST2_n39(key STRING, val1 STRING, val2 STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n39
+PREHOOK: Output: default@dest2_n39
POSTHOOK: query: CREATE TABLE DEST2_n39(key STRING, val1 STRING, val2 STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n39
+POSTHOOK: Output: default@dest2_n39
PREHOOK: query: explain
FROM (
select key, value from (
diff --git ql/src/test/results/clientpositive/llap/type_change_test_fraction.q.out ql/src/test/results/clientpositive/llap/type_change_test_fraction.q.out
index b5db3e10a6..0664521b83 100644
--- ql/src/test/results/clientpositive/llap/type_change_test_fraction.q.out
+++ ql/src/test/results/clientpositive/llap/type_change_test_fraction.q.out
@@ -16,7 +16,7 @@ PREHOOK: query: create table testAltCol_n2
cNumeric3_2 NUMERIC(3,2))
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltCol_n2
+PREHOOK: Output: default@testaltcol_n2
POSTHOOK: query: create table testAltCol_n2
(cId TINYINT,
cFloat FLOAT,
@@ -31,7 +31,7 @@ POSTHOOK: query: create table testAltCol_n2
cNumeric3_2 NUMERIC(3,2))
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltCol_n2
+POSTHOOK: Output: default@testaltcol_n2
PREHOOK: query: insert into testAltCol_n2 values
(1,
1.234e5,
@@ -558,12 +558,12 @@ PREHOOK: query: create table testAltColT_n2 stored as textfile as select * from
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n2
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColT_n2
+PREHOOK: Output: default@testaltcolt_n2
POSTHOOK: query: create table testAltColT_n2 stored as textfile as select * from testAltCol_n2
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n2
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColT_n2
+POSTHOOK: Output: default@testaltcolt_n2
POSTHOOK: Lineage: testaltcolt_n2.cdecimal16_8 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal16_8, type:decimal(16,8), comment:null), ]
POSTHOOK: Lineage: testaltcolt_n2.cdecimal38_18 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal38_18, type:decimal(38,18), comment:null), ]
POSTHOOK: Lineage: testaltcolt_n2.cdecimal38_37 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal38_37, type:decimal(38,37), comment:null), ]
@@ -2286,12 +2286,12 @@ PREHOOK: query: create table testAltColSF_n2 stored as sequencefile as select *
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n2
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColSF_n2
+PREHOOK: Output: default@testaltcolsf_n2
POSTHOOK: query: create table testAltColSF_n2 stored as sequencefile as select * from testAltCol_n2
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n2
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColSF_n2
+POSTHOOK: Output: default@testaltcolsf_n2
POSTHOOK: Lineage: testaltcolsf_n2.cdecimal16_8 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal16_8, type:decimal(16,8), comment:null), ]
POSTHOOK: Lineage: testaltcolsf_n2.cdecimal38_18 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal38_18, type:decimal(38,18), comment:null), ]
POSTHOOK: Lineage: testaltcolsf_n2.cdecimal38_37 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal38_37, type:decimal(38,37), comment:null), ]
@@ -4014,12 +4014,12 @@ PREHOOK: query: create table testAltColRCF_n2 stored as rcfile as select * from
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n2
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColRCF_n2
+PREHOOK: Output: default@testaltcolrcf_n2
POSTHOOK: query: create table testAltColRCF_n2 stored as rcfile as select * from testAltCol_n2
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n2
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColRCF_n2
+POSTHOOK: Output: default@testaltcolrcf_n2
POSTHOOK: Lineage: testaltcolrcf_n2.cdecimal16_8 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal16_8, type:decimal(16,8), comment:null), ]
POSTHOOK: Lineage: testaltcolrcf_n2.cdecimal38_18 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal38_18, type:decimal(38,18), comment:null), ]
POSTHOOK: Lineage: testaltcolrcf_n2.cdecimal38_37 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal38_37, type:decimal(38,37), comment:null), ]
@@ -5742,12 +5742,12 @@ PREHOOK: query: create table testAltColORC_n2 stored as orc as select * from tes
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n2
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColORC_n2
+PREHOOK: Output: default@testaltcolorc_n2
POSTHOOK: query: create table testAltColORC_n2 stored as orc as select * from testAltCol_n2
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n2
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColORC_n2
+POSTHOOK: Output: default@testaltcolorc_n2
POSTHOOK: Lineage: testaltcolorc_n2.cdecimal16_8 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal16_8, type:decimal(16,8), comment:null), ]
POSTHOOK: Lineage: testaltcolorc_n2.cdecimal38_18 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal38_18, type:decimal(38,18), comment:null), ]
POSTHOOK: Lineage: testaltcolorc_n2.cdecimal38_37 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal38_37, type:decimal(38,37), comment:null), ]
@@ -7010,12 +7010,12 @@ PREHOOK: query: create table testAltColPDE_n2 stored as parquet as select * from
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n2
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColPDE_n2
+PREHOOK: Output: default@testaltcolpde_n2
POSTHOOK: query: create table testAltColPDE_n2 stored as parquet as select * from testAltCol_n2
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n2
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColPDE_n2
+POSTHOOK: Output: default@testaltcolpde_n2
POSTHOOK: Lineage: testaltcolpde_n2.cdecimal16_8 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal16_8, type:decimal(16,8), comment:null), ]
POSTHOOK: Lineage: testaltcolpde_n2.cdecimal38_18 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal38_18, type:decimal(38,18), comment:null), ]
POSTHOOK: Lineage: testaltcolpde_n2.cdecimal38_37 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal38_37, type:decimal(38,37), comment:null), ]
@@ -8739,13 +8739,13 @@ select * from testAltCol_n2
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n2
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColPDD_n2
+PREHOOK: Output: default@testaltcolpdd_n2
POSTHOOK: query: create table testAltColPDD_n2 stored as parquet tblproperties ("parquet.enable.dictionary"="false") as
select * from testAltCol_n2
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n2
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColPDD_n2
+POSTHOOK: Output: default@testaltcolpdd_n2
POSTHOOK: Lineage: testaltcolpdd_n2.cdecimal16_8 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal16_8, type:decimal(16,8), comment:null), ]
POSTHOOK: Lineage: testaltcolpdd_n2.cdecimal38_18 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal38_18, type:decimal(38,18), comment:null), ]
POSTHOOK: Lineage: testaltcolpdd_n2.cdecimal38_37 SIMPLE [(testaltcol_n2)testaltcol_n2.FieldSchema(name:cdecimal38_37, type:decimal(38,37), comment:null), ]
diff --git ql/src/test/results/clientpositive/llap/type_change_test_int.q.out ql/src/test/results/clientpositive/llap/type_change_test_int.q.out
index 1a9b49a8af..aff189a184 100644
--- ql/src/test/results/clientpositive/llap/type_change_test_int.q.out
+++ ql/src/test/results/clientpositive/llap/type_change_test_int.q.out
@@ -10,7 +10,7 @@ PREHOOK: query: create table testAltCol_n1
cTinyint TINYINT)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltCol_n1
+PREHOOK: Output: default@testaltcol_n1
POSTHOOK: query: create table testAltCol_n1
(cId TINYINT,
cBigInt BIGINT,
@@ -19,7 +19,7 @@ POSTHOOK: query: create table testAltCol_n1
cTinyint TINYINT)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltCol_n1
+POSTHOOK: Output: default@testaltcol_n1
PREHOOK: query: insert into testAltCol_n1 values
(1,
1234567890123456789,
@@ -132,12 +132,12 @@ PREHOOK: query: create table testAltColT_n1 stored as textfile as select * from
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n1
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColT_n1
+PREHOOK: Output: default@testaltcolt_n1
POSTHOOK: query: create table testAltColT_n1 stored as textfile as select * from testAltCol_n1
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n1
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColT_n1
+POSTHOOK: Output: default@testaltcolt_n1
POSTHOOK: Lineage: testaltcolt_n1.cbigint SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolt_n1.cid SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cid, type:tinyint, comment:null), ]
POSTHOOK: Lineage: testaltcolt_n1.cint SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cint, type:int, comment:null), ]
@@ -501,12 +501,12 @@ PREHOOK: query: create table testAltColSF_n1 stored as sequencefile as select *
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n1
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColSF_n1
+PREHOOK: Output: default@testaltcolsf_n1
POSTHOOK: query: create table testAltColSF_n1 stored as sequencefile as select * from testAltCol_n1
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n1
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColSF_n1
+POSTHOOK: Output: default@testaltcolsf_n1
POSTHOOK: Lineage: testaltcolsf_n1.cbigint SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolsf_n1.cid SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cid, type:tinyint, comment:null), ]
POSTHOOK: Lineage: testaltcolsf_n1.cint SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cint, type:int, comment:null), ]
@@ -870,12 +870,12 @@ PREHOOK: query: create table testAltColRCF_n1 stored as rcfile as select * from
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n1
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColRCF_n1
+PREHOOK: Output: default@testaltcolrcf_n1
POSTHOOK: query: create table testAltColRCF_n1 stored as rcfile as select * from testAltCol_n1
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n1
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColRCF_n1
+POSTHOOK: Output: default@testaltcolrcf_n1
POSTHOOK: Lineage: testaltcolrcf_n1.cbigint SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolrcf_n1.cid SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cid, type:tinyint, comment:null), ]
POSTHOOK: Lineage: testaltcolrcf_n1.cint SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cint, type:int, comment:null), ]
@@ -1239,12 +1239,12 @@ PREHOOK: query: create table testAltColORC_n1 stored as orc as select * from tes
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n1
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColORC_n1
+PREHOOK: Output: default@testaltcolorc_n1
POSTHOOK: query: create table testAltColORC_n1 stored as orc as select * from testAltCol_n1
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n1
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColORC_n1
+POSTHOOK: Output: default@testaltcolorc_n1
POSTHOOK: Lineage: testaltcolorc_n1.cbigint SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolorc_n1.cid SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cid, type:tinyint, comment:null), ]
POSTHOOK: Lineage: testaltcolorc_n1.cint SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cint, type:int, comment:null), ]
@@ -1608,12 +1608,12 @@ PREHOOK: query: create table testAltColPDE_n0 stored as parquet as select * from
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n1
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColPDE_n0
+PREHOOK: Output: default@testaltcolpde_n0
POSTHOOK: query: create table testAltColPDE_n0 stored as parquet as select * from testAltCol_n1
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n1
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColPDE_n0
+POSTHOOK: Output: default@testaltcolpde_n0
POSTHOOK: Lineage: testaltcolpde_n0.cbigint SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolpde_n0.cid SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cid, type:tinyint, comment:null), ]
POSTHOOK: Lineage: testaltcolpde_n0.cint SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cint, type:int, comment:null), ]
@@ -1978,13 +1978,13 @@ select * from testAltCol_n1
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n1
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColPDD_n0
+PREHOOK: Output: default@testaltcolpdd_n0
POSTHOOK: query: create table testAltColPDD_n0 stored as parquet tblproperties ("parquet.enable.dictionary"="false") as
select * from testAltCol_n1
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n1
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColPDD_n0
+POSTHOOK: Output: default@testaltcolpdd_n0
POSTHOOK: Lineage: testaltcolpdd_n0.cbigint SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolpdd_n0.cid SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cid, type:tinyint, comment:null), ]
POSTHOOK: Lineage: testaltcolpdd_n0.cint SIMPLE [(testaltcol_n1)testaltcol_n1.FieldSchema(name:cint, type:int, comment:null), ]
diff --git ql/src/test/results/clientpositive/llap/uber_reduce.q.out ql/src/test/results/clientpositive/llap/uber_reduce.q.out
index 44a9dea25f..72bdac5c53 100644
--- ql/src/test/results/clientpositive/llap/uber_reduce.q.out
+++ ql/src/test/results/clientpositive/llap/uber_reduce.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TABLE T1_n136(key STRING, val STRING)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n136
+PREHOOK: Output: default@t1_n136
POSTHOOK: query: CREATE TABLE T1_n136(key STRING, val STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n136
+POSTHOOK: Output: default@t1_n136
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n136
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/llap/udf_round_2.q.out ql/src/test/results/clientpositive/llap/udf_round_2.q.out
index b018b53118..2ad07b2f8c 100644
--- ql/src/test/results/clientpositive/llap/udf_round_2.q.out
+++ ql/src/test/results/clientpositive/llap/udf_round_2.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: create table tstTbl1_n0(n double)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@tstTbl1_n0
+PREHOOK: Output: default@tsttbl1_n0
POSTHOOK: query: create table tstTbl1_n0(n double)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@tstTbl1_n0
+POSTHOOK: Output: default@tsttbl1_n0
PREHOOK: query: insert overwrite table tstTbl1_n0
select 'NaN' from src tablesample (1 rows)
PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/llap/udf_round_2_auto_stats.q.out ql/src/test/results/clientpositive/llap/udf_round_2_auto_stats.q.out
index 4dbe8fc9f9..014e946df0 100644
--- ql/src/test/results/clientpositive/llap/udf_round_2_auto_stats.q.out
+++ ql/src/test/results/clientpositive/llap/udf_round_2_auto_stats.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: create table tstTbl1(n double)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@tstTbl1
+PREHOOK: Output: default@tsttbl1
POSTHOOK: query: create table tstTbl1(n double)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@tstTbl1
+POSTHOOK: Output: default@tsttbl1
PREHOOK: query: insert overwrite table tstTbl1
select 'NaN' from src tablesample (1 rows)
PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/llap/union_date_trim.q.out ql/src/test/results/clientpositive/llap/union_date_trim.q.out
index a51a5ac393..8657ef78c8 100644
--- ql/src/test/results/clientpositive/llap/union_date_trim.q.out
+++ ql/src/test/results/clientpositive/llap/union_date_trim.q.out
@@ -5,11 +5,11 @@ POSTHOOK: type: DROPTABLE
PREHOOK: query: create table testDate(id int, dt date)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@testDate
+PREHOOK: Output: default@testdate
POSTHOOK: query: create table testDate(id int, dt date)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testDate
+POSTHOOK: Output: default@testdate
PREHOOK: query: insert into table testDate select 1, '2014-04-07' from src where key=100 limit 1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
diff --git ql/src/test/results/clientpositive/llap/union_remove_26.q.out ql/src/test/results/clientpositive/llap/union_remove_26.q.out
index 1b68b4bbfc..fc892f62e2 100644
--- ql/src/test/results/clientpositive/llap/union_remove_26.q.out
+++ ql/src/test/results/clientpositive/llap/union_remove_26.q.out
@@ -1,27 +1,27 @@
PREHOOK: query: create table inputSrcTbl1(key string, val int) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputSrcTbl1
+PREHOOK: Output: default@inputsrctbl1
POSTHOOK: query: create table inputSrcTbl1(key string, val int) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputSrcTbl1
+POSTHOOK: Output: default@inputsrctbl1
PREHOOK: query: create table inputSrcTbl2(key string, val int) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputSrcTbl2
+PREHOOK: Output: default@inputsrctbl2
POSTHOOK: query: create table inputSrcTbl2(key string, val int) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputSrcTbl2
+POSTHOOK: Output: default@inputsrctbl2
PREHOOK: query: create table inputSrcTbl3(key string, val int) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputSrcTbl3
+PREHOOK: Output: default@inputsrctbl3
POSTHOOK: query: create table inputSrcTbl3(key string, val int) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputSrcTbl3
+POSTHOOK: Output: default@inputsrctbl3
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputSrcTbl1
PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -49,27 +49,27 @@ POSTHOOK: Output: default@inputsrctbl3
PREHOOK: query: create table inputTbl1_n6(key string, val int) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n6
+PREHOOK: Output: default@inputtbl1_n6
POSTHOOK: query: create table inputTbl1_n6(key string, val int) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n6
+POSTHOOK: Output: default@inputtbl1_n6
PREHOOK: query: create table inputTbl2(key string, val int) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl2
+PREHOOK: Output: default@inputtbl2
POSTHOOK: query: create table inputTbl2(key string, val int) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl2
+POSTHOOK: Output: default@inputtbl2
PREHOOK: query: create table inputTbl3(key string, val int) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl3
+PREHOOK: Output: default@inputtbl3
POSTHOOK: query: create table inputTbl3(key string, val int) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl3
+POSTHOOK: Output: default@inputtbl3
PREHOOK: query: insert into inputTbl1_n6 select * from inputSrcTbl1
PREHOOK: type: QUERY
PREHOOK: Input: default@inputsrctbl1
diff --git ql/src/test/results/clientpositive/llap/union_top_level.q.out ql/src/test/results/clientpositive/llap/union_top_level.q.out
index e0d86b3989..74e85d90f6 100644
--- ql/src/test/results/clientpositive/llap/union_top_level.q.out
+++ ql/src/test/results/clientpositive/llap/union_top_level.q.out
@@ -662,10 +662,10 @@ STAGE PLANS:
Stage: Stage-4
Create Table
columns: key string, value int
- name: default.union_top
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.union_top
Stage: Stage-3
Stats Work
@@ -1352,7 +1352,7 @@ union all
select `b`.`key`, `b`.`value` from (select `src`.`key`, 1 as `value` from `default`.`src` where `src`.`key` % 3 == 1 limit 3)`b`
union all
select `c`.`key`, `c`.`value` from (select `src`.`key`, 2 as `value` from `default`.`src` where `src`.`key` % 3 == 2 limit 3)`c`
- name: default.union_top_view
+ name: hive.default.union_top_view
original text: select * from (select key, 0 as value from src where key % 3 == 0
limit 3)a
union all
select * from (select key, 1 as value from src where key % 3 == 1 limit 3)b
diff --git ql/src/test/results/clientpositive/llap/unset_table_view_property.q.out ql/src/test/results/clientpositive/llap/unset_table_view_property.q.out
index 5d140d68f9..23246590c8 100644
--- ql/src/test/results/clientpositive/llap/unset_table_view_property.q.out
+++ ql/src/test/results/clientpositive/llap/unset_table_view_property.q.out
@@ -7,11 +7,11 @@ POSTHOOK: Output: database:vt
PREHOOK: query: CREATE TABLE vt.testTable(col1 INT, col2 INT)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:vt
-PREHOOK: Output: vt@testTable
+PREHOOK: Output: vt@testtable
POSTHOOK: query: CREATE TABLE vt.testTable(col1 INT, col2 INT)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:vt
-POSTHOOK: Output: vt@testTable
+POSTHOOK: Output: vt@testtable
PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
PREHOOK: type: SHOW_TBLPROPERTIES
POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
@@ -217,13 +217,13 @@ PREHOOK: query: CREATE VIEW vt.testView AS SELECT value FROM src WHERE key=86
PREHOOK: type: CREATEVIEW
PREHOOK: Input: default@src
PREHOOK: Output: database:vt
-PREHOOK: Output: vt@testView
+PREHOOK: Output: vt@testview
POSTHOOK: query: CREATE VIEW vt.testView AS SELECT value FROM src WHERE key=86
POSTHOOK: type: CREATEVIEW
POSTHOOK: Input: default@src
POSTHOOK: Output: database:vt
-POSTHOOK: Output: vt@testView
-POSTHOOK: Lineage: testView.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: vt@testview
+POSTHOOK: Lineage: testview.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: ALTER VIEW vt.testView SET TBLPROPERTIES ('propA'='100', 'propB'='200')
PREHOOK: type: ALTERVIEW_PROPERTIES
PREHOOK: Input: vt@testview
diff --git ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out
index c8947d5c21..381072a7f6 100644
--- ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out
+++ ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out
@@ -50,14 +50,14 @@ ROW FORMAT DELIMITED
STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_UDF_txt_n0
+PREHOOK: Output: default@decimal_udf_txt_n0
POSTHOOK: query: CREATE TABLE DECIMAL_UDF_txt_n0 (key decimal(20,10), value int)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ' '
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_UDF_txt_n0
+POSTHOOK: Output: default@decimal_udf_txt_n0
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF_txt_n0
PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -70,12 +70,12 @@ PREHOOK: query: CREATE TABLE DECIMAL_UDF_n1 (key decimal(20,10), value int)
STORED AS ORC
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_UDF_n1
+PREHOOK: Output: default@decimal_udf_n1
POSTHOOK: query: CREATE TABLE DECIMAL_UDF_n1 (key decimal(20,10), value int)
STORED AS ORC
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_UDF_n1
+POSTHOOK: Output: default@decimal_udf_n1
PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_UDF_n1 SELECT * FROM DECIMAL_UDF_txt_n0
PREHOOK: type: QUERY
PREHOOK: Input: default@decimal_udf_txt_n0
diff --git ql/src/test/results/clientpositive/llap/vector_between_columns.q.out ql/src/test/results/clientpositive/llap/vector_between_columns.q.out
index 546dc45b2f..e3246536cc 100644
--- ql/src/test/results/clientpositive/llap/vector_between_columns.q.out
+++ ql/src/test/results/clientpositive/llap/vector_between_columns.q.out
@@ -2,22 +2,22 @@ PREHOOK: query: create table if not exists TSINT_txt ( RNUM int , CSINT smallint
ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n'
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@TSINT_txt
+PREHOOK: Output: default@tsint_txt
POSTHOOK: query: create table if not exists TSINT_txt ( RNUM int , CSINT smallint )
ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@TSINT_txt
+POSTHOOK: Output: default@tsint_txt
PREHOOK: query: create table if not exists TINT_txt ( RNUM int , CINT int )
ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n'
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@TINT_txt
+PREHOOK: Output: default@tint_txt
POSTHOOK: query: create table if not exists TINT_txt ( RNUM int , CINT int )
ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@TINT_txt
+POSTHOOK: Output: default@tint_txt
PREHOOK: query: load data local inpath '../../data/files/TSINT' into table TSINT_txt
PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -38,12 +38,12 @@ PREHOOK: query: create table TSINT stored as orc AS SELECT * FROM TSINT_txt
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@tsint_txt
PREHOOK: Output: database:default
-PREHOOK: Output: default@TSINT
+PREHOOK: Output: default@tsint
POSTHOOK: query: create table TSINT stored as orc AS SELECT * FROM TSINT_txt
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@tsint_txt
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@TSINT
+POSTHOOK: Output: default@tsint
POSTHOOK: Lineage: tsint.csint SIMPLE [(tsint_txt)tsint_txt.FieldSchema(name:csint, type:smallint, comment:null), ]
POSTHOOK: Lineage: tsint.rnum SIMPLE [(tsint_txt)tsint_txt.FieldSchema(name:rnum, type:int, comment:null), ]
tsint_txt.rnum tsint_txt.csint
@@ -62,12 +62,12 @@ PREHOOK: query: create table TINT stored as orc AS SELECT * FROM TINT_txt
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@tint_txt
PREHOOK: Output: database:default
-PREHOOK: Output: default@TINT
+PREHOOK: Output: default@tint
POSTHOOK: query: create table TINT stored as orc AS SELECT * FROM TINT_txt
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@tint_txt
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@TINT
+POSTHOOK: Output: default@tint
POSTHOOK: Lineage: tint.cint SIMPLE [(tint_txt)tint_txt.FieldSchema(name:cint, type:int, comment:null), ]
POSTHOOK: Lineage: tint.rnum SIMPLE [(tint_txt)tint_txt.FieldSchema(name:rnum, type:int, comment:null), ]
tint_txt.rnum tint_txt.cint
diff --git ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out
index 9e8d6a07cf..6b07389879 100644
--- ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out
+++ ql/src/test/results/clientpositive/llap/vector_char_varchar_1.q.out
@@ -125,10 +125,10 @@ STAGE PLANS:
Stage: Stage-4
Create Table
columns: _c0 int, _c1 string
- name: default.varchar_ctas_1
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.varchar_ctas_1
Stage: Stage-3
Stats Work
@@ -299,10 +299,10 @@ STAGE PLANS:
Stage: Stage-4
Create Table
columns: _c0 int, _c1 string
- name: default.char_ctas_1
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.char_ctas_1
Stage: Stage-3
Stats Work
diff --git ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out
index 2f3ffd71e5..444b99ec84 100644
--- ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out
+++ ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out
@@ -26,12 +26,12 @@ PREHOOK: query: CREATE TABLE `DECIMAL` STORED AS ORC AS SELECT * FROM decimal_tx
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@decimal_txt
PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL
+PREHOOK: Output: default@decimal
POSTHOOK: query: CREATE TABLE `DECIMAL` STORED AS ORC AS SELECT * FROM decimal_txt
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@decimal_txt
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL
+POSTHOOK: Output: default@decimal
POSTHOOK: Lineage: decimal.dec SIMPLE [(decimal_txt)decimal_txt.FieldSchema(name:dec, type:decimal(10,0), comment:null), ]
PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
SELECT `dec` FROM `DECIMAL` order by `dec`
diff --git ql/src/test/results/clientpositive/llap/vector_decimal_3.q.out ql/src/test/results/clientpositive/llap/vector_decimal_3.q.out
index b292c9a01b..e07bc1ebea 100644
--- ql/src/test/results/clientpositive/llap/vector_decimal_3.q.out
+++ ql/src/test/results/clientpositive/llap/vector_decimal_3.q.out
@@ -12,14 +12,14 @@ ROW FORMAT DELIMITED
STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_3_txt_n0
+PREHOOK: Output: default@decimal_3_txt_n0
POSTHOOK: query: CREATE TABLE DECIMAL_3_txt_n0(key decimal(38,18), value int)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ' '
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_3_txt_n0
+POSTHOOK: Output: default@decimal_3_txt_n0
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3_txt_n0
PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -32,12 +32,12 @@ PREHOOK: query: CREATE TABLE DECIMAL_3_n1 STORED AS ORC AS SELECT * FROM DECIMAL
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@decimal_3_txt_n0
PREHOOK: Output: database:default
-PREHOOK: Output: default@DECIMAL_3_n1
+PREHOOK: Output: default@decimal_3_n1
POSTHOOK: query: CREATE TABLE DECIMAL_3_n1 STORED AS ORC AS SELECT * FROM DECIMAL_3_txt_n0
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@decimal_3_txt_n0
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DECIMAL_3_n1
+POSTHOOK: Output: default@decimal_3_n1
POSTHOOK: Lineage: decimal_3_n1.key SIMPLE [(decimal_3_txt_n0)decimal_3_txt_n0.FieldSchema(name:key, type:decimal(38,18), comment:null), ]
POSTHOOK: Lineage: decimal_3_n1.value SIMPLE [(decimal_3_txt_n0)decimal_3_txt_n0.FieldSchema(name:value, type:int, comment:null), ]
PREHOOK: query: SELECT * FROM DECIMAL_3_n1 ORDER BY key, value
diff --git 
ql/src/test/results/clientpositive/llap/vector_decimal_4.q.out ql/src/test/results/clientpositive/llap/vector_decimal_4.q.out index fc18645663..eb9b74b195 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_4.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_4.q.out @@ -12,24 +12,24 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_4_1_n0 +PREHOOK: Output: default@decimal_4_1_n0 POSTHOOK: query: CREATE TABLE DECIMAL_4_1_n0(key decimal(35,25), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_4_1_n0 +POSTHOOK: Output: default@decimal_4_1_n0 PREHOOK: query: CREATE TABLE DECIMAL_4_2_n0(key decimal(35,25), value decimal(35,25)) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_4_2_n0 +PREHOOK: Output: default@decimal_4_2_n0 POSTHOOK: query: CREATE TABLE DECIMAL_4_2_n0(key decimal(35,25), value decimal(35,25)) STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_4_2_n0 +POSTHOOK: Output: default@decimal_4_2_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_4_1_n0 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/vector_decimal_5.q.out ql/src/test/results/clientpositive/llap/vector_decimal_5.q.out index 58c40cd280..5446486fff 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_5.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_5.q.out @@ -12,14 +12,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_5_txt +PREHOOK: Output: default@decimal_5_txt POSTHOOK: query: CREATE TABLE DECIMAL_5_txt(key decimal(10,5), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_5_txt +POSTHOOK: Output: default@decimal_5_txt PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_5_txt PREHOOK: type: LOAD #### A masked pattern was here #### @@ -32,12 +32,12 @@ PREHOOK: query: CREATE TABLE DECIMAL_5(key decimal(10,5), value int) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_5 +PREHOOK: Output: default@decimal_5 POSTHOOK: query: CREATE TABLE DECIMAL_5(key decimal(10,5), value int) STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_5 +POSTHOOK: Output: default@decimal_5 PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_5 SELECT * FROM DECIMAL_5_txt PREHOOK: type: QUERY PREHOOK: Input: default@decimal_5_txt diff --git ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out index e899da5c1f..b9a05667a6 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out @@ -28,28 +28,28 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_6_1_txt +PREHOOK: Output: default@decimal_6_1_txt POSTHOOK: query: CREATE TABLE DECIMAL_6_1_txt(key decimal(10,5), value int) ROW 
FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_6_1_txt +POSTHOOK: Output: default@decimal_6_1_txt PREHOOK: query: CREATE TABLE DECIMAL_6_2_txt(key decimal(17,4), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_6_2_txt +PREHOOK: Output: default@decimal_6_2_txt POSTHOOK: query: CREATE TABLE DECIMAL_6_2_txt(key decimal(17,4), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_6_2_txt +POSTHOOK: Output: default@decimal_6_2_txt PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_1_txt PREHOOK: type: LOAD #### A masked pattern was here #### @@ -70,22 +70,22 @@ PREHOOK: query: CREATE TABLE DECIMAL_6_1(key decimal(10,5), value int) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_6_1 +PREHOOK: Output: default@decimal_6_1 POSTHOOK: query: CREATE TABLE DECIMAL_6_1(key decimal(10,5), value int) STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_6_1 +POSTHOOK: Output: default@decimal_6_1 PREHOOK: query: CREATE TABLE DECIMAL_6_2(key decimal(17,4), value int) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_6_2 +PREHOOK: Output: default@decimal_6_2 POSTHOOK: query: CREATE TABLE DECIMAL_6_2(key decimal(17,4), value int) STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_6_2 +POSTHOOK: Output: default@decimal_6_2 PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_6_1 SELECT * FROM DECIMAL_6_1_txt PREHOOK: type: QUERY PREHOOK: Input: default@decimal_6_1_txt @@ -642,13 +642,13 @@ CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, value * 11 AS v PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@decimal_6_1 PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_6_3 +PREHOOK: Output: default@decimal_6_3 POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1 ORDER BY v POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@decimal_6_1 POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_6_3 +POSTHOOK: Output: default@decimal_6_3 PLAN VECTORIZATION: enabled: true enabledConditionsMet: [hive.vectorized.execution.enabled IS true] @@ -733,7 +733,7 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde - name: default.DECIMAL_6_3 + name: default.decimal_6_3 Select Operator expressions: _col0 (type: decimal(11,5)), _col1 (type: int) outputColumnNames: col1, col2 @@ -761,10 +761,10 @@ STAGE PLANS: Stage: Stage-4 Create Table columns: k decimal(11,5), v int - name: default.DECIMAL_6_3 input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde name: org.apache.hadoop.hive.ql.io.orc.OrcSerde + name: hive.default.decimal_6_3 Stage: Stage-3 Stats Work @@ -772,7 +772,7 @@ STAGE PLANS: Column Stats Desc: Columns: k, v Column Types: decimal(11,5), int 
- Table: default.DECIMAL_6_3 + Table: default.decimal_6_3 Stage: Stage-0 Move Operator @@ -784,12 +784,12 @@ PREHOOK: query: CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@decimal_6_1 PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_6_3 +PREHOOK: Output: default@decimal_6_3 POSTHOOK: query: CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1 ORDER BY v POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@decimal_6_1 POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_6_3 +POSTHOOK: Output: default@decimal_6_3 POSTHOOK: Lineage: decimal_6_3.k EXPRESSION [(decimal_6_1)decimal_6_1.FieldSchema(name:key, type:decimal(10,5), comment:null), ] POSTHOOK: Lineage: decimal_6_3.v EXPRESSION [(decimal_6_1)decimal_6_1.FieldSchema(name:value, type:int, comment:null), ] PREHOOK: query: desc DECIMAL_6_3 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out index b21100fc05..ab080f7d71 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out @@ -16,14 +16,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_PRECISION_txt +PREHOOK: Output: default@decimal_precision_txt POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION_txt(`dec` decimal(20,10)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_PRECISION_txt +POSTHOOK: Output: default@decimal_precision_txt PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION_txt PREHOOK: type: LOAD #### A masked pattern was here #### @@ -36,12 +36,12 @@ PREHOOK: query: CREATE TABLE DECIMAL_PRECISION(`dec` decimal(20,10)) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_PRECISION +PREHOOK: Output: default@decimal_precision POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION(`dec` decimal(20,10)) STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_PRECISION +POSTHOOK: Output: default@decimal_precision PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_PRECISION SELECT * FROM DECIMAL_PRECISION_txt PREHOOK: type: QUERY PREHOOK: Input: default@decimal_precision_txt @@ -742,14 +742,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_PRECISION_txt_small +PREHOOK: Output: default@decimal_precision_txt_small POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION_txt_small(`dec` decimal(20,10)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_PRECISION_txt_small +POSTHOOK: Output: default@decimal_precision_txt_small PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION_txt_small PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out index 5902a0d4e7..e92f7c6b2e 100644 --- 
ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out @@ -16,7 +16,7 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_TRAILING_txt +PREHOOK: Output: default@decimal_trailing_txt POSTHOOK: query: CREATE TABLE DECIMAL_TRAILING_txt ( id int, a decimal(10,4), @@ -27,7 +27,7 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_TRAILING_txt +POSTHOOK: Output: default@decimal_trailing_txt PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv10.txt' INTO TABLE DECIMAL_TRAILING_txt PREHOOK: type: LOAD #### A masked pattern was here #### @@ -44,7 +44,7 @@ PREHOOK: query: CREATE TABLE DECIMAL_TRAILING ( STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_TRAILING +PREHOOK: Output: default@decimal_trailing POSTHOOK: query: CREATE TABLE DECIMAL_TRAILING ( id int, a decimal(10,4), @@ -53,7 +53,7 @@ POSTHOOK: query: CREATE TABLE DECIMAL_TRAILING ( STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_TRAILING +POSTHOOK: Output: default@decimal_trailing PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_TRAILING SELECT * FROM DECIMAL_TRAILING_txt PREHOOK: type: QUERY PREHOOK: Input: default@decimal_trailing_txt diff --git ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out index fd04e6bbc3..45bf364171 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out @@ -12,14 +12,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_UDF_txt +PREHOOK: Output: default@decimal_udf_txt POSTHOOK: query: CREATE TABLE DECIMAL_UDF_txt (key decimal(20,10), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_UDF_txt +POSTHOOK: Output: default@decimal_udf_txt PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF_txt PREHOOK: type: LOAD #### A masked pattern was here #### @@ -32,12 +32,12 @@ PREHOOK: query: CREATE TABLE DECIMAL_UDF_n0 (key decimal(20,10), value int) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_UDF_n0 +PREHOOK: Output: default@decimal_udf_n0 POSTHOOK: query: CREATE TABLE DECIMAL_UDF_n0 (key decimal(20,10), value int) STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_UDF_n0 +POSTHOOK: Output: default@decimal_udf_n0 PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_UDF_n0 SELECT * FROM DECIMAL_UDF_txt PREHOOK: type: QUERY PREHOOK: Input: default@decimal_udf_txt @@ -4157,14 +4157,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_UDF_txt_small +PREHOOK: Output: default@decimal_udf_txt_small POSTHOOK: query: CREATE TABLE DECIMAL_UDF_txt_small (key decimal(15,3), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: 
default@DECIMAL_UDF_txt_small +POSTHOOK: Output: default@decimal_udf_txt_small PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF_txt_small PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out index 7c1fe9eac2..647ec4c7e0 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out @@ -12,14 +12,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_UDF2_txt +PREHOOK: Output: default@decimal_udf2_txt POSTHOOK: query: CREATE TABLE DECIMAL_UDF2_txt (key decimal(14,5), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_UDF2_txt +POSTHOOK: Output: default@decimal_udf2_txt PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF2_txt PREHOOK: type: LOAD #### A masked pattern was here #### @@ -32,12 +32,12 @@ PREHOOK: query: CREATE TABLE DECIMAL_UDF2_n0 (key decimal(14,5), value int) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_UDF2_n0 +PREHOOK: Output: default@decimal_udf2_n0 POSTHOOK: query: CREATE TABLE DECIMAL_UDF2_n0 (key decimal(14,5), value int) STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_UDF2_n0 +POSTHOOK: Output: default@decimal_udf2_n0 PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_UDF2_n0 SELECT * FROM DECIMAL_UDF2_txt PREHOOK: type: QUERY PREHOOK: Input: default@decimal_udf2_txt diff --git ql/src/test/results/clientpositive/llap/vector_full_outer_join.q.out ql/src/test/results/clientpositive/llap/vector_full_outer_join.q.out index db107dfc4e..515d901c86 100644 --- ql/src/test/results/clientpositive/llap/vector_full_outer_join.q.out +++ ql/src/test/results/clientpositive/llap/vector_full_outer_join.q.out @@ -9,35 +9,35 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: create table if not exists TJOIN1 (RNUM int , C1 int, C2 int) STORED AS orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN1 +PREHOOK: Output: default@tjoin1 POSTHOOK: query: create table if not exists TJOIN1 (RNUM int , C1 int, C2 int) STORED AS orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN1 +POSTHOOK: Output: default@tjoin1 PREHOOK: query: create table if not exists TJOIN2 (RNUM int , C1 int, C2 char(2)) STORED AS orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN2 +PREHOOK: Output: default@tjoin2 POSTHOOK: query: create table if not exists TJOIN2 (RNUM int , C1 int, C2 char(2)) STORED AS orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN2 +POSTHOOK: Output: default@tjoin2 PREHOOK: query: create table if not exists TJOIN1STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN1STAGE +PREHOOK: Output: default@tjoin1stage POSTHOOK: query: create table if not exists TJOIN1STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS 
TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN1STAGE +POSTHOOK: Output: default@tjoin1stage PREHOOK: query: create table if not exists TJOIN2STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN2STAGE +PREHOOK: Output: default@tjoin2stage POSTHOOK: query: create table if not exists TJOIN2STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN2STAGE +POSTHOOK: Output: default@tjoin2stage PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tjoin1.txt' OVERWRITE INTO TABLE TJOIN1STAGE PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out index 96c83be6b2..752ec73125 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_n90(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n90 +PREHOOK: Output: default@t1_n90 POSTHOOK: query: CREATE TABLE T1_n90(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n90 +POSTHOOK: Output: default@t1_n90 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n90 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -940,19 +940,19 @@ NULL 6 PREHOOK: query: CREATE TABLE T2_n55(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n55 +PREHOOK: Output: default@t2_n55 POSTHOOK: query: CREATE TABLE T2_n55(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n55 +POSTHOOK: Output: default@t2_n55 PREHOOK: query: CREATE TABLE T3_n19(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n19 +PREHOOK: Output: default@t3_n19 POSTHOOK: query: CREATE TABLE T3_n19(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n19 +POSTHOOK: Output: default@t3_n19 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL FROM T1_n90 INSERT OVERWRITE TABLE T2_n55 SELECT key, val, count(1) group by key, val with cube diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out index 480011e12e..45920696e1 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_text(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_text +PREHOOK: Output: default@t1_text POSTHOOK: query: CREATE TABLE 
T1_text(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_text +POSTHOOK: Output: default@t1_text PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_text PREHOOK: type: LOAD #### A masked pattern was here #### @@ -18,12 +18,12 @@ PREHOOK: query: CREATE TABLE T1_n17 STORED AS ORC AS SELECT * FROM T1_text PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@t1_text PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n17 +PREHOOK: Output: default@t1_n17 POSTHOOK: query: CREATE TABLE T1_n17 STORED AS ORC AS SELECT * FROM T1_text POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@t1_text POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n17 +POSTHOOK: Output: default@t1_n17 POSTHOOK: Lineage: t1_n17.key SIMPLE [(t1_text)t1_text.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: t1_n17.val SIMPLE [(t1_text)t1_text.FieldSchema(name:val, type:string, comment:null), ] t1_text.key t1_text.val diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out index bf610a63de..a4dc1a06c6 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_text_n4(key INT, value INT) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_text_n4 +PREHOOK: Output: default@t1_text_n4 POSTHOOK: query: CREATE TABLE T1_text_n4(key INT, value INT) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_text_n4 +POSTHOOK: Output: default@t1_text_n4 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_groupingid.txt' INTO TABLE T1_text_n4 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -18,12 +18,12 @@ PREHOOK: query: CREATE TABLE T1_n73 STORED AS ORC AS SELECT * FROM T1_text_n4 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@t1_text_n4 PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n73 +PREHOOK: Output: default@t1_n73 POSTHOOK: query: CREATE TABLE T1_n73 STORED AS ORC AS SELECT * FROM T1_text_n4 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@t1_text_n4 POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n73 +POSTHOOK: Output: default@t1_n73 POSTHOOK: Lineage: t1_n73.key SIMPLE [(t1_text_n4)t1_text_n4.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: t1_n73.value SIMPLE [(t1_text_n4)t1_text_n4.FieldSchema(name:value, type:int, comment:null), ] t1_text_n4.key t1_text_n4.value diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out index 554cabb728..6cc89ecf75 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_text_n9(key INT, value INT) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_text_n9 +PREHOOK: Output: default@t1_text_n9 POSTHOOK: query: CREATE TABLE T1_text_n9(key INT, value INT) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default 
-POSTHOOK: Output: default@T1_text_n9 +POSTHOOK: Output: default@t1_text_n9 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_groupingid.txt' INTO TABLE T1_text_n9 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -18,12 +18,12 @@ PREHOOK: query: CREATE TABLE T1_n112 STORED AS ORC AS SELECT * FROM T1_text_n9 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@t1_text_n9 PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n112 +PREHOOK: Output: default@t1_n112 POSTHOOK: query: CREATE TABLE T1_n112 STORED AS ORC AS SELECT * FROM T1_text_n9 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@t1_text_n9 POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n112 +POSTHOOK: Output: default@t1_n112 POSTHOOK: Lineage: t1_n112.key SIMPLE [(t1_text_n9)t1_text_n9.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: t1_n112.value SIMPLE [(t1_text_n9)t1_text_n9.FieldSchema(name:value, type:int, comment:null), ] t1_text_n9.key t1_text_n9.value diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out index 4d2dee40c1..adcff2ba31 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_text_n0(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_text_n0 +PREHOOK: Output: default@t1_text_n0 POSTHOOK: query: CREATE TABLE T1_text_n0(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_text_n0 +POSTHOOK: Output: default@t1_text_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_text_n0 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -18,12 +18,12 @@ PREHOOK: query: CREATE TABLE T1_n30 STORED AS ORC AS SELECT * FROM T1_text_n0 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@t1_text_n0 PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n30 +PREHOOK: Output: default@t1_n30 POSTHOOK: query: CREATE TABLE T1_n30 STORED AS ORC AS SELECT * FROM T1_text_n0 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@t1_text_n0 POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n30 +POSTHOOK: Output: default@t1_n30 POSTHOOK: Lineage: t1_n30.a SIMPLE [(t1_text_n0)t1_text_n0.FieldSchema(name:a, type:string, comment:null), ] POSTHOOK: Lineage: t1_n30.b SIMPLE [(t1_text_n0)t1_text_n0.FieldSchema(name:b, type:string, comment:null), ] POSTHOOK: Lineage: t1_n30.c SIMPLE [(t1_text_n0)t1_text_n0.FieldSchema(name:c, type:string, comment:null), ] diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out index c6587d1853..288582546b 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_text_n3(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: 
default@T1_text_n3 +PREHOOK: Output: default@t1_text_n3 POSTHOOK: query: CREATE TABLE T1_text_n3(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_text_n3 +POSTHOOK: Output: default@t1_text_n3 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_text_n3 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -18,12 +18,12 @@ PREHOOK: query: CREATE TABLE T1_n69 STORED AS ORC AS SELECT * FROM T1_text_n3 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@t1_text_n3 PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n69 +PREHOOK: Output: default@t1_n69 POSTHOOK: query: CREATE TABLE T1_n69 STORED AS ORC AS SELECT * FROM T1_text_n3 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@t1_text_n3 POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n69 +POSTHOOK: Output: default@t1_n69 POSTHOOK: Lineage: t1_n69.a SIMPLE [(t1_text_n3)t1_text_n3.FieldSchema(name:a, type:string, comment:null), ] POSTHOOK: Lineage: t1_n69.b SIMPLE [(t1_text_n3)t1_text_n3.FieldSchema(name:b, type:string, comment:null), ] POSTHOOK: Lineage: t1_n69.c SIMPLE [(t1_text_n3)t1_text_n3.FieldSchema(name:c, type:string, comment:null), ] @@ -613,11 +613,11 @@ NULL NULL 23.0 PREHOOK: query: CREATE TABLE T2_n42(a STRING, b STRING, c int, d int) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n42 +PREHOOK: Output: default@t2_n42 POSTHOOK: query: CREATE TABLE T2_n42(a STRING, b STRING, c int, d int) STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n42 +POSTHOOK: Output: default@t2_n42 PREHOOK: query: INSERT OVERWRITE TABLE T2_n42 SELECT a, b, c, c from T1_n69 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out index 50921a2388..0c595f0b2c 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_text_n7(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_text_n7 +PREHOOK: Output: default@t1_text_n7 POSTHOOK: query: CREATE TABLE T1_text_n7(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_text_n7 +POSTHOOK: Output: default@t1_text_n7 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets1.txt' INTO TABLE T1_text_n7 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -26,12 +26,12 @@ PREHOOK: query: CREATE TABLE T1_n106 STORED AS ORC AS SELECT * FROM T1_text_n7 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@t1_text_n7 PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n106 +PREHOOK: Output: default@t1_n106 POSTHOOK: query: CREATE TABLE T1_n106 STORED AS ORC AS SELECT * FROM T1_text_n7 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@t1_text_n7 POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n106 +POSTHOOK: Output: default@t1_n106 POSTHOOK: Lineage: t1_n106.a SIMPLE 
[(t1_text_n7)t1_text_n7.FieldSchema(name:a, type:string, comment:null), ] POSTHOOK: Lineage: t1_n106.b SIMPLE [(t1_text_n7)t1_text_n7.FieldSchema(name:b, type:string, comment:null), ] POSTHOOK: Lineage: t1_n106.c SIMPLE [(t1_text_n7)t1_text_n7.FieldSchema(name:c, type:string, comment:null), ] diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out index 3c79f3ea3a..40bd2ff1e0 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_text_n10(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_text_n10 +PREHOOK: Output: default@t1_text_n10 POSTHOOK: query: CREATE TABLE T1_text_n10(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_text_n10 +POSTHOOK: Output: default@t1_text_n10 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets1.txt' INTO TABLE T1_text_n10 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -26,12 +26,12 @@ PREHOOK: query: CREATE TABLE T1_n115 STORED AS ORC AS SELECT a, b, cast(c as dec PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@t1_text_n10 PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n115 +PREHOOK: Output: default@t1_n115 POSTHOOK: query: CREATE TABLE T1_n115 STORED AS ORC AS SELECT a, b, cast(c as decimal(10,2)) as c_dec FROM T1_text_n10 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@t1_text_n10 POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n115 +POSTHOOK: Output: default@t1_n115 POSTHOOK: Lineage: t1_n115.a SIMPLE [(t1_text_n10)t1_text_n10.FieldSchema(name:a, type:string, comment:null), ] POSTHOOK: Lineage: t1_n115.b SIMPLE [(t1_text_n10)t1_text_n10.FieldSchema(name:b, type:string, comment:null), ] POSTHOOK: Lineage: t1_n115.c_dec EXPRESSION [(t1_text_n10)t1_text_n10.FieldSchema(name:c, type:string, comment:null), ] diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out index 95617f4adc..718a8d5b65 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_text_n11(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_text_n11 +PREHOOK: Output: default@t1_text_n11 POSTHOOK: query: CREATE TABLE T1_text_n11(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_text_n11 +POSTHOOK: Output: default@t1_text_n11 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_text_n11 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -18,12 +18,12 @@ PREHOOK: query: CREATE TABLE T1_n161 STORED AS ORC AS SELECT * FROM T1_text_n11 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: 
default@t1_text_n11 PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n161 +PREHOOK: Output: default@t1_n161 POSTHOOK: query: CREATE TABLE T1_n161 STORED AS ORC AS SELECT * FROM T1_text_n11 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@t1_text_n11 POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n161 +POSTHOOK: Output: default@t1_n161 POSTHOOK: Lineage: t1_n161.a SIMPLE [(t1_text_n11)t1_text_n11.FieldSchema(name:a, type:string, comment:null), ] POSTHOOK: Lineage: t1_n161.b SIMPLE [(t1_text_n11)t1_text_n11.FieldSchema(name:b, type:string, comment:null), ] POSTHOOK: Lineage: t1_n161.c SIMPLE [(t1_text_n11)t1_text_n11.FieldSchema(name:c, type:string, comment:null), ] diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out index dafe07d8c2..58b74308e8 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_text_n1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_text_n1 +PREHOOK: Output: default@t1_text_n1 POSTHOOK: query: CREATE TABLE T1_text_n1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_text_n1 +POSTHOOK: Output: default@t1_text_n1 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_text_n1 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -18,12 +18,12 @@ PREHOOK: query: CREATE TABLE T1_n33 STORED AS ORC AS SELECT * FROM T1_text_n1 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@t1_text_n1 PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n33 +PREHOOK: Output: default@t1_n33 POSTHOOK: query: CREATE TABLE T1_n33 STORED AS ORC AS SELECT * FROM T1_text_n1 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@t1_text_n1 POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n33 +POSTHOOK: Output: default@t1_n33 POSTHOOK: Lineage: t1_n33.a SIMPLE [(t1_text_n1)t1_text_n1.FieldSchema(name:a, type:string, comment:null), ] POSTHOOK: Lineage: t1_n33.b SIMPLE [(t1_text_n1)t1_text_n1.FieldSchema(name:b, type:string, comment:null), ] POSTHOOK: Lineage: t1_n33.c SIMPLE [(t1_text_n1)t1_text_n1.FieldSchema(name:c, type:string, comment:null), ] diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out index 8a1ba0a088..840a3dcb25 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_text_n6(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_text_n6 +PREHOOK: Output: default@t1_text_n6 POSTHOOK: query: CREATE TABLE T1_text_n6(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_text_n6 +POSTHOOK: Output: 
default@t1_text_n6 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_text_n6 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -18,12 +18,12 @@ PREHOOK: query: CREATE TABLE T1_n84 STORED AS ORC AS SELECT * FROM T1_text_n6 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@t1_text_n6 PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n84 +PREHOOK: Output: default@t1_n84 POSTHOOK: query: CREATE TABLE T1_n84 STORED AS ORC AS SELECT * FROM T1_text_n6 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@t1_text_n6 POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n84 +POSTHOOK: Output: default@t1_n84 POSTHOOK: Lineage: t1_n84.a SIMPLE [(t1_text_n6)t1_text_n6.FieldSchema(name:a, type:string, comment:null), ] POSTHOOK: Lineage: t1_n84.b SIMPLE [(t1_text_n6)t1_text_n6.FieldSchema(name:b, type:string, comment:null), ] POSTHOOK: Lineage: t1_n84.c SIMPLE [(t1_text_n6)t1_text_n6.FieldSchema(name:c, type:string, comment:null), ] diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out index 51762d003d..99d911d322 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_text_n2(key INT, value INT) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_text_n2 +PREHOOK: Output: default@t1_text_n2 POSTHOOK: query: CREATE TABLE T1_text_n2(key INT, value INT) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_text_n2 +POSTHOOK: Output: default@t1_text_n2 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_groupingid.txt' INTO TABLE T1_text_n2 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -18,12 +18,12 @@ PREHOOK: query: CREATE TABLE T1_n47 STORED AS ORC AS SELECT * FROM T1_text_n2 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@t1_text_n2 PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n47 +PREHOOK: Output: default@t1_n47 POSTHOOK: query: CREATE TABLE T1_n47 STORED AS ORC AS SELECT * FROM T1_text_n2 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@t1_text_n2 POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n47 +POSTHOOK: Output: default@t1_n47 POSTHOOK: Lineage: t1_n47.key SIMPLE [(t1_text_n2)t1_text_n2.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: t1_n47.value SIMPLE [(t1_text_n2)t1_text_n2.FieldSchema(name:value, type:int, comment:null), ] t1_text_n2.key t1_text_n2.value diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out index a96e3ef279..cbc8c4b248 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_text_n8(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_text_n8 +PREHOOK: Output: default@t1_text_n8 POSTHOOK: query: CREATE TABLE T1_text_n8(a STRING, b STRING, c STRING) ROW 
FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_text_n8 +POSTHOOK: Output: default@t1_text_n8 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_text_n8 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -18,12 +18,12 @@ PREHOOK: query: CREATE TABLE T1_n110 STORED AS ORC AS SELECT * FROM T1_text_n8 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@t1_text_n8 PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n110 +PREHOOK: Output: default@t1_n110 POSTHOOK: query: CREATE TABLE T1_n110 STORED AS ORC AS SELECT * FROM T1_text_n8 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@t1_text_n8 POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n110 +POSTHOOK: Output: default@t1_n110 POSTHOOK: Lineage: t1_n110.a SIMPLE [(t1_text_n8)t1_text_n8.FieldSchema(name:a, type:string, comment:null), ] POSTHOOK: Lineage: t1_n110.b SIMPLE [(t1_text_n8)t1_text_n8.FieldSchema(name:b, type:string, comment:null), ] POSTHOOK: Lineage: t1_n110.c SIMPLE [(t1_text_n8)t1_text_n8.FieldSchema(name:c, type:string, comment:null), ] diff --git ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out index e6ae542cbe..d07f7b3789 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_text_n5(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_text_n5 +PREHOOK: Output: default@t1_text_n5 POSTHOOK: query: CREATE TABLE T1_text_n5(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_text_n5 +POSTHOOK: Output: default@t1_text_n5 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_text_n5 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -18,12 +18,12 @@ PREHOOK: query: CREATE TABLE T1_n83 STORED AS ORC AS SELECT * FROM T1_text_n5 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@t1_text_n5 PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n83 +PREHOOK: Output: default@t1_n83 POSTHOOK: query: CREATE TABLE T1_n83 STORED AS ORC AS SELECT * FROM T1_text_n5 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@t1_text_n5 POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n83 +POSTHOOK: Output: default@t1_n83 POSTHOOK: Lineage: t1_n83.key SIMPLE [(t1_text_n5)t1_text_n5.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: t1_n83.val SIMPLE [(t1_text_n5)t1_text_n5.FieldSchema(name:val, type:string, comment:null), ] PREHOOK: query: EXPLAIN VECTORIZATION DETAIL @@ -629,19 +629,19 @@ NULL 6 PREHOOK: query: CREATE TABLE T2_n52(key1 STRING, key2 STRING, val INT) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n52 +PREHOOK: Output: default@t2_n52 POSTHOOK: query: CREATE TABLE T2_n52(key1 STRING, key2 STRING, val INT) STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n52 +POSTHOOK: Output: default@t2_n52 PREHOOK: query: CREATE TABLE T3_n17(key1 STRING, key2 STRING, val INT) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default 
-PREHOOK: Output: default@T3_n17 +PREHOOK: Output: default@t3_n17 POSTHOOK: query: CREATE TABLE T3_n17(key1 STRING, key2 STRING, val INT) STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n17 +POSTHOOK: Output: default@t3_n17 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL FROM T1_n83 INSERT OVERWRITE TABLE T2_n52 SELECT key, val, count(1) group by key, val with rollup diff --git ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out index e64d08517c..9ae7bb8469 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n137(key STRING, val STRING) PARTITIONED BY (ds CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n137 +PREHOOK: Output: default@t1_n137 POSTHOOK: query: CREATE TABLE T1_n137(key STRING, val STRING) PARTITIONED BY (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n137 +POSTHOOK: Output: default@t1_n137 PREHOOK: query: INSERT OVERWRITE TABLE T1_n137 PARTITION (ds='1') SELECT * from src where key < 10 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out index 0ae84aa689..2758a87820 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n2(key STRING, val STRING) PARTITIONED BY (ds st CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n2 +PREHOOK: Output: default@t1_n2 POSTHOOK: query: CREATE TABLE T1_n2(key STRING, val STRING) PARTITIONED BY (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n2 +POSTHOOK: Output: default@t1_n2 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n2 PARTITION (ds='1') PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out index 836d872f68..127e582c8d 100644 --- ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out +++ ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out @@ -9,35 +9,35 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: create table if not exists TJOIN1 (RNUM int , C1 int, C2 int) STORED AS orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN1 +PREHOOK: Output: default@tjoin1 POSTHOOK: query: create table if not exists TJOIN1 (RNUM int , C1 int, C2 int) STORED AS orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN1 +POSTHOOK: Output: default@tjoin1 PREHOOK: query: create table if not exists TJOIN2 (RNUM int , C1 int, C2 char(2)) STORED AS orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN2 +PREHOOK: Output: default@tjoin2 
POSTHOOK: query: create table if not exists TJOIN2 (RNUM int , C1 int, C2 char(2)) STORED AS orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN2 +POSTHOOK: Output: default@tjoin2 PREHOOK: query: create table if not exists TJOIN1STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN1STAGE +PREHOOK: Output: default@tjoin1stage POSTHOOK: query: create table if not exists TJOIN1STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN1STAGE +POSTHOOK: Output: default@tjoin1stage PREHOOK: query: create table if not exists TJOIN2STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN2STAGE +PREHOOK: Output: default@tjoin2stage POSTHOOK: query: create table if not exists TJOIN2STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN2STAGE +POSTHOOK: Output: default@tjoin2stage PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tjoin1.txt' OVERWRITE INTO TABLE TJOIN1STAGE PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/vector_left_outer_join3.q.out ql/src/test/results/clientpositive/llap/vector_left_outer_join3.q.out index b9e418e664..bb125d8b58 100644 --- ql/src/test/results/clientpositive/llap/vector_left_outer_join3.q.out +++ ql/src/test/results/clientpositive/llap/vector_left_outer_join3.q.out @@ -9,35 +9,35 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: create table if not exists TJOIN3 (name string, id int, flag string) STORED AS orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN3 +PREHOOK: Output: default@tjoin3 POSTHOOK: query: create table if not exists TJOIN3 (name string, id int, flag string) STORED AS orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN3 +POSTHOOK: Output: default@tjoin3 PREHOOK: query: create table if not exists TJOIN4 (code_name string, id int) STORED AS orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN4 +PREHOOK: Output: default@tjoin4 POSTHOOK: query: create table if not exists TJOIN4 (code_name string, id int) STORED AS orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN4 +POSTHOOK: Output: default@tjoin4 PREHOOK: query: create table if not exists TJOIN3STAGE (name string, id int, flag string) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN3STAGE +PREHOOK: Output: default@tjoin3stage POSTHOOK: query: create table if not exists TJOIN3STAGE (name string, id int, flag string) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN3STAGE +POSTHOOK: Output: 
default@tjoin3stage PREHOOK: query: create table if not exists TJOIN4STAGE (code_name string, id int) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN4STAGE +PREHOOK: Output: default@tjoin4stage POSTHOOK: query: create table if not exists TJOIN4STAGE (code_name string, id int) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN4STAGE +POSTHOOK: Output: default@tjoin4stage PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tjoin3.txt' OVERWRITE INTO TABLE TJOIN3STAGE PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/llap/vector_outer_join6.q.out ql/src/test/results/clientpositive/llap/vector_outer_join6.q.out index c809983fcc..5efce9f446 100644 --- ql/src/test/results/clientpositive/llap/vector_outer_join6.q.out +++ ql/src/test/results/clientpositive/llap/vector_outer_join6.q.out @@ -2,42 +2,42 @@ PREHOOK: query: create table TJOIN1_txt (RNUM int , C1 int, C2 int) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN1_txt +PREHOOK: Output: default@tjoin1_txt POSTHOOK: query: create table TJOIN1_txt (RNUM int , C1 int, C2 int) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN1_txt +POSTHOOK: Output: default@tjoin1_txt PREHOOK: query: create table TJOIN2_txt (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN2_txt +PREHOOK: Output: default@tjoin2_txt POSTHOOK: query: create table TJOIN2_txt (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN2_txt +POSTHOOK: Output: default@tjoin2_txt PREHOOK: query: create table if not exists TJOIN3_txt (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN3_txt +PREHOOK: Output: default@tjoin3_txt POSTHOOK: query: create table if not exists TJOIN3_txt (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN3_txt +POSTHOOK: Output: default@tjoin3_txt PREHOOK: query: create table TJOIN4_txt (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN4_txt +PREHOOK: Output: default@tjoin4_txt POSTHOOK: query: create table TJOIN4_txt (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN4_txt +POSTHOOK: Output: default@tjoin4_txt PREHOOK: query: load data local inpath '../../data/files/TJOIN1' into table TJOIN1_txt PREHOOK: type: LOAD #### A masked pattern was here #### @@ -74,12 
+74,12 @@ PREHOOK: query: create table TJOIN1_n0 stored as orc AS SELECT * FROM TJOIN1_txt PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@tjoin1_txt PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN1_n0 +PREHOOK: Output: default@tjoin1_n0 POSTHOOK: query: create table TJOIN1_n0 stored as orc AS SELECT * FROM TJOIN1_txt POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@tjoin1_txt POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN1_n0 +POSTHOOK: Output: default@tjoin1_n0 POSTHOOK: Lineage: tjoin1_n0.c1 SIMPLE [(tjoin1_txt)tjoin1_txt.FieldSchema(name:c1, type:int, comment:null), ] POSTHOOK: Lineage: tjoin1_n0.c2 SIMPLE [(tjoin1_txt)tjoin1_txt.FieldSchema(name:c2, type:int, comment:null), ] POSTHOOK: Lineage: tjoin1_n0.rnum SIMPLE [(tjoin1_txt)tjoin1_txt.FieldSchema(name:rnum, type:int, comment:null), ] @@ -87,12 +87,12 @@ PREHOOK: query: create table TJOIN2_n0 stored as orc AS SELECT * FROM TJOIN2_txt PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@tjoin2_txt PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN2_n0 +PREHOOK: Output: default@tjoin2_n0 POSTHOOK: query: create table TJOIN2_n0 stored as orc AS SELECT * FROM TJOIN2_txt POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@tjoin2_txt POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN2_n0 +POSTHOOK: Output: default@tjoin2_n0 POSTHOOK: Lineage: tjoin2_n0.c1 SIMPLE [(tjoin2_txt)tjoin2_txt.FieldSchema(name:c1, type:int, comment:null), ] POSTHOOK: Lineage: tjoin2_n0.c2 SIMPLE [(tjoin2_txt)tjoin2_txt.FieldSchema(name:c2, type:char(2), comment:null), ] POSTHOOK: Lineage: tjoin2_n0.rnum SIMPLE [(tjoin2_txt)tjoin2_txt.FieldSchema(name:rnum, type:int, comment:null), ] @@ -100,12 +100,12 @@ PREHOOK: query: create table TJOIN3 stored as orc AS SELECT * FROM TJOIN3_txt PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@tjoin3_txt PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN3 +PREHOOK: Output: default@tjoin3 POSTHOOK: query: create table TJOIN3 stored as orc AS SELECT * FROM TJOIN3_txt POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@tjoin3_txt POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN3 +POSTHOOK: Output: default@tjoin3 POSTHOOK: Lineage: tjoin3.c1 SIMPLE [(tjoin3_txt)tjoin3_txt.FieldSchema(name:c1, type:int, comment:null), ] POSTHOOK: Lineage: tjoin3.c2 SIMPLE [(tjoin3_txt)tjoin3_txt.FieldSchema(name:c2, type:char(2), comment:null), ] POSTHOOK: Lineage: tjoin3.rnum SIMPLE [(tjoin3_txt)tjoin3_txt.FieldSchema(name:rnum, type:int, comment:null), ] @@ -113,12 +113,12 @@ PREHOOK: query: create table TJOIN4 stored as orc AS SELECT * FROM TJOIN4_txt PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@tjoin4_txt PREHOOK: Output: database:default -PREHOOK: Output: default@TJOIN4 +PREHOOK: Output: default@tjoin4 POSTHOOK: query: create table TJOIN4 stored as orc AS SELECT * FROM TJOIN4_txt POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@tjoin4_txt POSTHOOK: Output: database:default -POSTHOOK: Output: default@TJOIN4 +POSTHOOK: Output: default@tjoin4 POSTHOOK: Lineage: tjoin4.c1 SIMPLE [(tjoin4_txt)tjoin4_txt.FieldSchema(name:c1, type:int, comment:null), ] POSTHOOK: Lineage: tjoin4.c2 SIMPLE [(tjoin4_txt)tjoin4_txt.FieldSchema(name:c2, type:char(2), comment:null), ] POSTHOOK: Lineage: tjoin4.rnum SIMPLE [(tjoin4_txt)tjoin4_txt.FieldSchema(name:rnum, type:int, comment:null), ] diff --git ql/src/test/results/clientpositive/llap/vector_udf2.q.out 
ql/src/test/results/clientpositive/llap/vector_udf2.q.out index 266211c2c5..e51903f0a7 100644 --- ql/src/test/results/clientpositive/llap/vector_udf2.q.out +++ ql/src/test/results/clientpositive/llap/vector_udf2.q.out @@ -145,11 +145,11 @@ POSTHOOK: Output: default@varchar_udf_2 PREHOOK: query: create temporary table HIVE_14349 (a string) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@HIVE_14349 +PREHOOK: Output: default@hive_14349 POSTHOOK: query: create temporary table HIVE_14349 (a string) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@HIVE_14349 +POSTHOOK: Output: default@hive_14349 PREHOOK: query: insert into HIVE_14349 values('XYZa'), ('badXYZa') PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table diff --git ql/src/test/results/clientpositive/llap/vector_windowing.q.out ql/src/test/results/clientpositive/llap/vector_windowing.q.out index ca3c6337bf..b4084002b4 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing.q.out @@ -4729,7 +4729,7 @@ STAGE PLANS: round(sum(`part`.`p_retailprice`),2) as `s` from `default`.`part` group by `part`.`p_mfgr`, `part`.`p_brand` - name: default.mfgr_price_view_n2 + name: hive.default.mfgr_price_view_n2 original text: select p_mfgr, p_brand, round(sum(p_retailprice),2) as s from part @@ -5061,7 +5061,7 @@ STAGE PLANS: round(sum(`part`.`p_retailprice`) over w1,2) as `s` from `default`.`part` window w1 as (distribute by `part`.`p_mfgr` sort by `part`.`p_name` rows between 2 preceding and current row) - name: default.mfgr_brand_price_view_n0 + name: hive.default.mfgr_brand_price_view_n0 original text: select p_mfgr, p_brand, round(sum(p_retailprice) over w1,2) as s from part diff --git ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out index 314300f371..36f26836c6 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out @@ -779,13 +779,13 @@ select * from (select ctinyint, cdouble, rank() over(partition by ctinyint order PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@alltypesorc PREHOOK: Output: database:default -PREHOOK: Output: default@sB +PREHOOK: Output: default@sb POSTHOOK: query: create table sB ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE as select * from (select ctinyint, cdouble, rank() over(partition by ctinyint order by cdouble) r from alltypesorc) a where r < 5 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@alltypesorc POSTHOOK: Output: database:default -POSTHOOK: Output: default@sB +POSTHOOK: Output: default@sb POSTHOOK: Lineage: sb.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: sb.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] POSTHOOK: Lineage: sb.r SCRIPT [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), 
(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] @@ -815,14 +815,14 @@ select * from (select ctinyint, cdouble, rank() over(partition by ctinyint order PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@alltypesorc PREHOOK: Output: database:default -PREHOOK: Output: default@sD +PREHOOK: Output: default@sd POSTHOOK: query: explain vectorization detail create table sD ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE as select * from (select ctinyint, cdouble, rank() over(partition by ctinyint order by cdouble) r from alltypesorc) a where r < 5 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@alltypesorc POSTHOOK: Output: database:default -POSTHOOK: Output: default@sD +POSTHOOK: Output: default@sd Explain PLAN VECTORIZATION: enabled: true @@ -939,7 +939,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.sD + name: default.sd Select Operator expressions: _col0 (type: tinyint), _col1 (type: double), _col2 (type: int) outputColumnNames: col1, col2, col3 @@ -982,11 +982,11 @@ STAGE PLANS: Stage: Stage-4 Create Table columns: ctinyint tinyint, cdouble double, r int - name: default.sD field delimiter: , input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.sd Stage: Stage-3 Stats Work @@ -994,7 +994,7 @@ STAGE PLANS: Column Stats Desc: Columns: ctinyint, cdouble, r Column Types: tinyint, double, int - Table: default.sD + Table: default.sd Stage: Stage-0 Move Operator @@ -1007,13 +1007,13 @@ select * from (select ctinyint, cdouble, rank() over(partition by ctinyint order PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@alltypesorc PREHOOK: Output: database:default -PREHOOK: Output: default@sD +PREHOOK: Output: default@sd POSTHOOK: query: create table sD ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE as select * from (select ctinyint, cdouble, rank() over(partition by ctinyint order by cdouble) r from alltypesorc) a where r < 5 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@alltypesorc POSTHOOK: Output: database:default -POSTHOOK: Output: default@sD +POSTHOOK: Output: default@sd POSTHOOK: Lineage: sd.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: sd.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] POSTHOOK: Lineage: sd.r SCRIPT [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), 
(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] diff --git ql/src/test/results/clientpositive/llap/vectorization_input_format_excludes.q.out ql/src/test/results/clientpositive/llap/vectorization_input_format_excludes.q.out index a655c16fc6..54b28ddaa9 100644 --- ql/src/test/results/clientpositive/llap/vectorization_input_format_excludes.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_input_format_excludes.q.out @@ -1466,12 +1466,12 @@ PREHOOK: query: create table orcTbl (t1 tinyint, t2 tinyint) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@orcTbl +PREHOOK: Output: default@orctbl POSTHOOK: query: create table orcTbl (t1 tinyint, t2 tinyint) stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@orcTbl +POSTHOOK: Output: default@orctbl PREHOOK: query: insert into orcTbl values (54, 9), (-104, 25), (-112, 24) PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table @@ -1549,12 +1549,12 @@ PREHOOK: query: create table parquetTbl (t1 tinyint, t2 tinyint) stored as parquet PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@parquetTbl +PREHOOK: Output: default@parquettbl POSTHOOK: query: create table parquetTbl (t1 tinyint, t2 tinyint) stored as parquet POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@parquetTbl +POSTHOOK: Output: default@parquettbl PREHOOK: query: insert into parquetTbl values (54, 9), (-104, 25), (-112, 24) PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table diff --git ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out index c40dfa906e..1695092e93 100644 --- ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out @@ -32,28 +32,28 @@ PREHOOK: query: create table vsmb_bucket_RC(key int, value string) STORED AS RCFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@vsmb_bucket_RC +PREHOOK: Output: default@vsmb_bucket_rc POSTHOOK: query: create table vsmb_bucket_RC(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@vsmb_bucket_RC +POSTHOOK: Output: default@vsmb_bucket_rc PREHOOK: query: create table vsmb_bucket_TXT(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@vsmb_bucket_TXT +PREHOOK: Output: default@vsmb_bucket_txt POSTHOOK: query: create table vsmb_bucket_TXT(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: 
Output: database:default -POSTHOOK: Output: default@vsmb_bucket_TXT +POSTHOOK: Output: default@vsmb_bucket_txt PREHOOK: query: insert into table vsmb_bucket_1 select cint, cstring1 from alltypesorc limit 2 PREHOOK: type: QUERY PREHOOK: Input: default@alltypesorc diff --git ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out index d7f8f73b9b..32ad994dc5 100644 --- ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out @@ -171,10 +171,10 @@ STAGE PLANS: Stage: Stage-4 Create Table columns: ds string, date string - name: default.srcpart_date_n8 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.srcpart_date_n8 Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/llap/vectorized_rcfile_columnar.q.out ql/src/test/results/clientpositive/llap/vectorized_rcfile_columnar.q.out index f6adf3c702..b71ad32621 100644 --- ql/src/test/results/clientpositive/llap/vectorized_rcfile_columnar.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_rcfile_columnar.q.out @@ -6,7 +6,7 @@ STORED AS OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@columnTable_n0 +PREHOOK: Output: default@columntable_n0 POSTHOOK: query: CREATE table columnTable_n0 (key STRING, value STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' @@ -15,7 +15,7 @@ STORED AS OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@columnTable_n0 +POSTHOOK: Output: default@columntable_n0 PREHOOK: query: FROM src INSERT OVERWRITE TABLE columnTable_n0 SELECT src.key, src.value ORDER BY src.key, src.value LIMIT 10 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/merge3.q.out ql/src/test/results/clientpositive/merge3.q.out index fe83b8dd48..6d6474f051 100644 --- ql/src/test/results/clientpositive/merge3.q.out +++ ql/src/test/results/clientpositive/merge3.q.out @@ -227,10 +227,10 @@ STAGE PLANS: Stage: Stage-8 Create Table columns: key string, value string - name: default.merge_src2 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.merge_src2 Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/multi_insert_with_join2.q.out ql/src/test/results/clientpositive/multi_insert_with_join2.q.out index bdb876e618..faaa603c0f 100644 --- ql/src/test/results/clientpositive/multi_insert_with_join2.q.out +++ ql/src/test/results/clientpositive/multi_insert_with_join2.q.out @@ -1,19 +1,19 @@ PREHOOK: query: CREATE TABLE T_A ( id STRING, val STRING ) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T_A +PREHOOK: Output: default@t_a POSTHOOK: query: CREATE TABLE T_A ( id STRING, val STRING ) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T_A +POSTHOOK: Output: default@t_a PREHOOK: query: CREATE TABLE T_B ( id STRING, val STRING ) PREHOOK: type: CREATETABLE PREHOOK: Output: 
database:default -PREHOOK: Output: default@T_B +PREHOOK: Output: default@t_b POSTHOOK: query: CREATE TABLE T_B ( id STRING, val STRING ) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T_B +POSTHOOK: Output: default@t_b PREHOOK: query: CREATE TABLE join_result_1 ( ida STRING, vala STRING, idb STRING, valb STRING ) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default diff --git ql/src/test/results/clientpositive/multigroupby_singlemr.q.out ql/src/test/results/clientpositive/multigroupby_singlemr.q.out index 3ae1152645..cf52497cef 100644 --- ql/src/test/results/clientpositive/multigroupby_singlemr.q.out +++ ql/src/test/results/clientpositive/multigroupby_singlemr.q.out @@ -1,43 +1,43 @@ PREHOOK: query: CREATE TABLE TBL_n0(C1 INT, C2 INT, C3 INT, C4 INT) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TBL_n0 +PREHOOK: Output: default@tbl_n0 POSTHOOK: query: CREATE TABLE TBL_n0(C1 INT, C2 INT, C3 INT, C4 INT) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TBL_n0 +POSTHOOK: Output: default@tbl_n0 PREHOOK: query: CREATE TABLE DEST1_n116(d1 INT, d2 INT) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DEST1_n116 +PREHOOK: Output: default@dest1_n116 POSTHOOK: query: CREATE TABLE DEST1_n116(d1 INT, d2 INT) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DEST1_n116 +POSTHOOK: Output: default@dest1_n116 PREHOOK: query: CREATE TABLE DEST2_n30(d1 INT, d2 INT, d3 INT) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DEST2_n30 +PREHOOK: Output: default@dest2_n30 POSTHOOK: query: CREATE TABLE DEST2_n30(d1 INT, d2 INT, d3 INT) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DEST2_n30 +POSTHOOK: Output: default@dest2_n30 PREHOOK: query: CREATE TABLE DEST3_n4(d1 INT, d2 INT, d3 INT, d4 INT) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DEST3_n4 +PREHOOK: Output: default@dest3_n4 POSTHOOK: query: CREATE TABLE DEST3_n4(d1 INT, d2 INT, d3 INT, d4 INT) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DEST3_n4 +POSTHOOK: Output: default@dest3_n4 PREHOOK: query: CREATE TABLE DEST4(d1 INT, d2 INT, d3 INT, d4 INT) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DEST4 +PREHOOK: Output: default@dest4 POSTHOOK: query: CREATE TABLE DEST4(d1 INT, d2 INT, d3 INT, d4 INT) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DEST4 +POSTHOOK: Output: default@dest4 PREHOOK: query: EXPLAIN FROM TBL_n0 INSERT OVERWRITE TABLE DEST1_n116 SELECT TBL_n0.C1, COUNT(TBL_n0.C2) GROUP BY TBL_n0.C1 diff --git ql/src/test/results/clientpositive/nonmr_fetch.q.out ql/src/test/results/clientpositive/nonmr_fetch.q.out index 9e2c40d157..520b855376 100644 --- ql/src/test/results/clientpositive/nonmr_fetch.q.out +++ ql/src/test/results/clientpositive/nonmr_fetch.q.out @@ -1372,10 +1372,10 @@ STAGE PLANS: Stage: Stage-4 Create Table columns: key string, value string - name: default.srcx input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.srcx Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/nullformat.q.out ql/src/test/results/clientpositive/nullformat.q.out index 98bd08399f..3710e58b1f 100644 --- ql/src/test/results/clientpositive/nullformat.q.out +++ ql/src/test/results/clientpositive/nullformat.q.out @@ -49,10 +49,10 @@ STAGE PLANS: Stage: Stage-0 Create Table columns: a string, b string - name: default.null_tab1 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.null_tab1 PREHOOK: query: CREATE TABLE null_tab1(a STRING, b STRING) ROW FORMAT DELIMITED NULL DEFINED AS 'fooNull' PREHOOK: type: CREATETABLE diff --git ql/src/test/results/clientpositive/nullformatCTAS.q.out ql/src/test/results/clientpositive/nullformatCTAS.q.out index 792abe701a..7deb5fe29e 100644 --- ql/src/test/results/clientpositive/nullformatCTAS.q.out +++ ql/src/test/results/clientpositive/nullformatCTAS.q.out @@ -123,10 +123,10 @@ STAGE PLANS: Stage: Stage-8 Create Table columns: a string, b string - name: default.null_tab3 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.null_tab3 Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/parallel_orderby.q.out ql/src/test/results/clientpositive/parallel_orderby.q.out index 4349a145dd..cf46a4ac57 100644 --- ql/src/test/results/clientpositive/parallel_orderby.q.out +++ ql/src/test/results/clientpositive/parallel_orderby.q.out @@ -100,10 +100,10 @@ STAGE PLANS: Stage: Stage-3 Create Table columns: key string, value string - name: default.total_ordered input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.total_ordered Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/parquet_schema_evolution.q.out ql/src/test/results/clientpositive/parquet_schema_evolution.q.out index 3c38ed0705..16970837ad 100644 --- ql/src/test/results/clientpositive/parquet_schema_evolution.q.out +++ ql/src/test/results/clientpositive/parquet_schema_evolution.q.out @@ -9,11 +9,11 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: CREATE TABLE NewStructField(a struct<a1:map<string,string>, a2:struct<e1:int>>) STORED AS PARQUET PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@NewStructField +PREHOOK: Output: default@newstructfield POSTHOOK: query: CREATE TABLE NewStructField(a struct<a1:map<string,string>, a2:struct<e1:int>>) STORED AS PARQUET POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@NewStructField +POSTHOOK: Output: default@newstructfield PREHOOK: query: INSERT OVERWRITE TABLE NewStructField SELECT named_struct('a1', map('k1','v1'), 'a2', named_struct('e1',5)) FROM srcpart LIMIT 5 PREHOOK: type: QUERY PREHOOK: Input: default@srcpart @@ -84,12 +84,12 @@ PREHOOK: query: CREATE TABLE NewStructFieldTable STORED AS PARQUET AS SELECT * F PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@newstructfield PREHOOK: Output: database:default -PREHOOK: Output: default@NewStructFieldTable +PREHOOK: Output: default@newstructfieldtable POSTHOOK: query: CREATE TABLE NewStructFieldTable STORED AS PARQUET AS SELECT * FROM
NewStructField POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@newstructfield POSTHOOK: Output: database:default -POSTHOOK: Output: default@NewStructFieldTable +POSTHOOK: Output: default@newstructfieldtable POSTHOOK: Lineage: newstructfieldtable.a SIMPLE [(newstructfield)newstructfield.FieldSchema(name:a, type:struct<a1:map<string,string>,a2:struct<e1:int,e2:string>,a3:int>, comment:null), ] POSTHOOK: Lineage: newstructfieldtable.b SIMPLE [(newstructfield)newstructfield.FieldSchema(name:b, type:int, comment:null), ] PREHOOK: query: DESCRIBE NewStructFieldTable diff --git ql/src/test/results/clientpositive/pointlookup.q.out ql/src/test/results/clientpositive/pointlookup.q.out index a128626231..77273eda99 100644 --- ql/src/test/results/clientpositive/pointlookup.q.out +++ ql/src/test/results/clientpositive/pointlookup.q.out @@ -86,7 +86,7 @@ WHERE PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@orOutput +PREHOOK: Output: default@oroutput POSTHOOK: query: create table orOutput as SELECT key FROM src @@ -105,7 +105,7 @@ WHERE POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@orOutput +POSTHOOK: Output: default@oroutput POSTHOOK: Lineage: oroutput.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: explain SELECT key @@ -195,7 +195,7 @@ WHERE PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@inOutput +PREHOOK: Output: default@inoutput POSTHOOK: query: create table inOutput as SELECT key FROM src @@ -214,7 +214,7 @@ WHERE POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@inOutput +POSTHOOK: Output: default@inoutput POSTHOOK: Lineage: inoutput.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: explain SELECT key @@ -304,7 +304,7 @@ WHERE PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@inOutputOpt +PREHOOK: Output: default@inoutputopt POSTHOOK: query: create table inOutputOpt as SELECT key FROM src @@ -323,7 +323,7 @@ WHERE POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@inOutputOpt +POSTHOOK: Output: default@inoutputopt POSTHOOK: Lineage: inoutputopt.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: select count(*) from orOutput PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/ppd_deterministic_expr.q.out ql/src/test/results/clientpositive/ppd_deterministic_expr.q.out index 28f5e1f53b..72cd456f6a 100644 --- ql/src/test/results/clientpositive/ppd_deterministic_expr.q.out +++ ql/src/test/results/clientpositive/ppd_deterministic_expr.q.out @@ -199,7 +199,7 @@ where part1 in ('US', 'CA') PREHOOK: type: CREATEVIEW PREHOOK: Input: default@testa PREHOOK: Output: database:default -PREHOOK: Output: default@viewDeterministicUDFA +PREHOOK: Output: default@viewdeterministicudfa POSTHOOK: query: create view viewDeterministicUDFA partitioned on (vpart1, vpart2, vpart3) as select cast(col1 as decimal(38,18)) as vcol1, cast(col2 as decimal(38,18)) as vcol2, @@ -214,12 +214,12 @@ where part1 in ('US', 'CA') POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@testa POSTHOOK: Output: database:default -POSTHOOK: Output: 
default@viewDeterministicUDFA -POSTHOOK: Lineage: viewDeterministicUDFA.vcol1 EXPRESSION [(testa)testa.FieldSchema(name:col1, type:string, comment:), ] -POSTHOOK: Lineage: viewDeterministicUDFA.vcol2 EXPRESSION [(testa)testa.FieldSchema(name:col2, type:string, comment:), ] -POSTHOOK: Lineage: viewDeterministicUDFA.vcol3 EXPRESSION [(testa)testa.FieldSchema(name:col3, type:string, comment:), ] -POSTHOOK: Lineage: viewDeterministicUDFA.vcol4 EXPRESSION [(testa)testa.FieldSchema(name:col4, type:string, comment:), ] -POSTHOOK: Lineage: viewDeterministicUDFA.vcol5 EXPRESSION [(testa)testa.FieldSchema(name:col5, type:string, comment:), ] +POSTHOOK: Output: default@viewdeterministicudfa +POSTHOOK: Lineage: viewdeterministicudfa.vcol1 EXPRESSION [(testa)testa.FieldSchema(name:col1, type:string, comment:), ] +POSTHOOK: Lineage: viewdeterministicudfa.vcol2 EXPRESSION [(testa)testa.FieldSchema(name:col2, type:string, comment:), ] +POSTHOOK: Lineage: viewdeterministicudfa.vcol3 EXPRESSION [(testa)testa.FieldSchema(name:col3, type:string, comment:), ] +POSTHOOK: Lineage: viewdeterministicudfa.vcol4 EXPRESSION [(testa)testa.FieldSchema(name:col4, type:string, comment:), ] +POSTHOOK: Lineage: viewdeterministicudfa.vcol5 EXPRESSION [(testa)testa.FieldSchema(name:col5, type:string, comment:), ] PREHOOK: query: create view viewDeterministicUDFB partitioned on (vpart1, vpart2, vpart3) as select cast(cola as decimal(38,18)) as vcolA, cast(colb as decimal(38,18)) as vcolB, @@ -232,7 +232,7 @@ where part1 in ('US', 'CA') PREHOOK: type: CREATEVIEW PREHOOK: Input: default@testb PREHOOK: Output: database:default -PREHOOK: Output: default@viewDeterministicUDFB +PREHOOK: Output: default@viewdeterministicudfb POSTHOOK: query: create view viewDeterministicUDFB partitioned on (vpart1, vpart2, vpart3) as select cast(cola as decimal(38,18)) as vcolA, cast(colb as decimal(38,18)) as vcolB, @@ -245,10 +245,10 @@ where part1 in ('US', 'CA') POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@testb POSTHOOK: Output: database:default -POSTHOOK: Output: default@viewDeterministicUDFB -POSTHOOK: Lineage: viewDeterministicUDFB.vcola EXPRESSION [(testb)testb.FieldSchema(name:cola, type:string, comment:), ] -POSTHOOK: Lineage: viewDeterministicUDFB.vcolb EXPRESSION [(testb)testb.FieldSchema(name:colb, type:string, comment:), ] -POSTHOOK: Lineage: viewDeterministicUDFB.vcolc EXPRESSION [(testb)testb.FieldSchema(name:colc, type:string, comment:), ] +POSTHOOK: Output: default@viewdeterministicudfb +POSTHOOK: Lineage: viewdeterministicudfb.vcola EXPRESSION [(testb)testb.FieldSchema(name:cola, type:string, comment:), ] +POSTHOOK: Lineage: viewdeterministicudfb.vcolb EXPRESSION [(testb)testb.FieldSchema(name:colb, type:string, comment:), ] +POSTHOOK: Lineage: viewdeterministicudfb.vcolc EXPRESSION [(testb)testb.FieldSchema(name:colc, type:string, comment:), ] PREHOOK: query: create view viewNoUDFA partitioned on (part1, part2, part3) as select cast(col1 as decimal(38,18)) as vcol1, cast(col2 as decimal(38,18)) as vcol2, @@ -263,7 +263,7 @@ where part1 in ('US', 'CA') PREHOOK: type: CREATEVIEW PREHOOK: Input: default@testa PREHOOK: Output: database:default -PREHOOK: Output: default@viewNoUDFA +PREHOOK: Output: default@viewnoudfa POSTHOOK: query: create view viewNoUDFA partitioned on (part1, part2, part3) as select cast(col1 as decimal(38,18)) as vcol1, cast(col2 as decimal(38,18)) as vcol2, @@ -278,12 +278,12 @@ where part1 in ('US', 'CA') POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@testa POSTHOOK: Output: database:default 
-POSTHOOK: Output: default@viewNoUDFA -POSTHOOK: Lineage: viewNoUDFA.vcol1 EXPRESSION [(testa)testa.FieldSchema(name:col1, type:string, comment:), ] -POSTHOOK: Lineage: viewNoUDFA.vcol2 EXPRESSION [(testa)testa.FieldSchema(name:col2, type:string, comment:), ] -POSTHOOK: Lineage: viewNoUDFA.vcol3 EXPRESSION [(testa)testa.FieldSchema(name:col3, type:string, comment:), ] -POSTHOOK: Lineage: viewNoUDFA.vcol4 EXPRESSION [(testa)testa.FieldSchema(name:col4, type:string, comment:), ] -POSTHOOK: Lineage: viewNoUDFA.vcol5 EXPRESSION [(testa)testa.FieldSchema(name:col5, type:string, comment:), ] +POSTHOOK: Output: default@viewnoudfa +POSTHOOK: Lineage: viewnoudfa.vcol1 EXPRESSION [(testa)testa.FieldSchema(name:col1, type:string, comment:), ] +POSTHOOK: Lineage: viewnoudfa.vcol2 EXPRESSION [(testa)testa.FieldSchema(name:col2, type:string, comment:), ] +POSTHOOK: Lineage: viewnoudfa.vcol3 EXPRESSION [(testa)testa.FieldSchema(name:col3, type:string, comment:), ] +POSTHOOK: Lineage: viewnoudfa.vcol4 EXPRESSION [(testa)testa.FieldSchema(name:col4, type:string, comment:), ] +POSTHOOK: Lineage: viewnoudfa.vcol5 EXPRESSION [(testa)testa.FieldSchema(name:col5, type:string, comment:), ] PREHOOK: query: create view viewNoUDFB partitioned on (part1, part2, part3) as select cast(cola as decimal(38,18)) as vcolA, cast(colb as decimal(38,18)) as vcolB, @@ -296,7 +296,7 @@ where part1 in ('US', 'CA') PREHOOK: type: CREATEVIEW PREHOOK: Input: default@testb PREHOOK: Output: database:default -PREHOOK: Output: default@viewNoUDFB +PREHOOK: Output: default@viewnoudfb POSTHOOK: query: create view viewNoUDFB partitioned on (part1, part2, part3) as select cast(cola as decimal(38,18)) as vcolA, cast(colb as decimal(38,18)) as vcolB, @@ -309,10 +309,10 @@ where part1 in ('US', 'CA') POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@testb POSTHOOK: Output: database:default -POSTHOOK: Output: default@viewNoUDFB -POSTHOOK: Lineage: viewNoUDFB.vcola EXPRESSION [(testb)testb.FieldSchema(name:cola, type:string, comment:), ] -POSTHOOK: Lineage: viewNoUDFB.vcolb EXPRESSION [(testb)testb.FieldSchema(name:colb, type:string, comment:), ] -POSTHOOK: Lineage: viewNoUDFB.vcolc EXPRESSION [(testb)testb.FieldSchema(name:colc, type:string, comment:), ] +POSTHOOK: Output: default@viewnoudfb +POSTHOOK: Lineage: viewnoudfb.vcola EXPRESSION [(testb)testb.FieldSchema(name:cola, type:string, comment:), ] +POSTHOOK: Lineage: viewnoudfb.vcolb EXPRESSION [(testb)testb.FieldSchema(name:colb, type:string, comment:), ] +POSTHOOK: Lineage: viewnoudfb.vcolc EXPRESSION [(testb)testb.FieldSchema(name:colc, type:string, comment:), ] PREHOOK: query: explain select vcol1, vcol2, vcol3, vcola, vcolb from viewDeterministicUDFA a inner join viewDeterministicUDFB b diff --git ql/src/test/results/clientpositive/quotedid_skew.q.out ql/src/test/results/clientpositive/quotedid_skew.q.out index 051462fc9e..e77a68d43b 100644 --- ql/src/test/results/clientpositive/quotedid_skew.q.out +++ ql/src/test/results/clientpositive/quotedid_skew.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n46(`!@#$%^&*()_q` string, `y&y` string) SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n46 +PREHOOK: Output: default@t1_n46 POSTHOOK: query: CREATE TABLE T1_n46(`!@#$%^&*()_q` string, `y&y` string) SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n46 +POSTHOOK: Output: default@t1_n46 
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n46 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n28(`!@#$%^&*()_q` string, `y&y` string) SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n28 +PREHOOK: Output: default@t2_n28 POSTHOOK: query: CREATE TABLE T2_n28(`!@#$%^&*()_q` string, `y&y` string) SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n28 +POSTHOOK: Output: default@t2_n28 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T2_n28 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/rcfile_bigdata.q.out ql/src/test/results/clientpositive/rcfile_bigdata.q.out index c1ada45ad0..2793b5c92f 100644 --- ql/src/test/results/clientpositive/rcfile_bigdata.q.out +++ ql/src/test/results/clientpositive/rcfile_bigdata.q.out @@ -6,7 +6,7 @@ STORED AS OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@columnTable_Bigdata +PREHOOK: Output: default@columntable_bigdata POSTHOOK: query: CREATE table columnTable_Bigdata (key STRING, value STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' @@ -15,7 +15,7 @@ STORED AS OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@columnTable_Bigdata +POSTHOOK: Output: default@columntable_bigdata PREHOOK: query: FROM (FROM src MAP src.key,src.value USING 'python dumpdata_script.py' AS (key,value) WHERE src.key = 10) subq INSERT OVERWRITE TABLE columnTable_Bigdata SELECT subq.key, subq.value PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/runtime_skewjoin_mapjoin_spark.q.out ql/src/test/results/clientpositive/runtime_skewjoin_mapjoin_spark.q.out index 9547e4fa7c..156805c892 100644 --- ql/src/test/results/clientpositive/runtime_skewjoin_mapjoin_spark.q.out +++ ql/src/test/results/clientpositive/runtime_skewjoin_mapjoin_spark.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_n94(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n94 +PREHOOK: Output: default@t1_n94 POSTHOOK: query: CREATE TABLE T1_n94(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n94 +POSTHOOK: Output: default@t1_n94 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n94 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/skewjoin.q.out ql/src/test/results/clientpositive/skewjoin.q.out new file mode 100644 index 0000000000..919ca9c881 --- /dev/null +++ ql/src/test/results/clientpositive/skewjoin.q.out @@ -0,0 +1,1647 @@ +PREHOOK: query: CREATE TABLE T1_n128(key STRING, val STRING) STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@t1_n128 +POSTHOOK: query: CREATE TABLE T1_n128(key STRING, val STRING) STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@t1_n128 +PREHOOK: query: CREATE TABLE T2_n76(key STRING, val STRING) STORED AS TEXTFILE 
+PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@t2_n76 +POSTHOOK: query: CREATE TABLE T2_n76(key STRING, val STRING) STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@t2_n76 +PREHOOK: query: CREATE TABLE T3_n30(key STRING, val STRING) STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@t3_n30 +POSTHOOK: query: CREATE TABLE T3_n30(key STRING, val STRING) STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@t3_n30 +PREHOOK: query: CREATE TABLE T4_n17(key STRING, val STRING) STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@t4_n17 +POSTHOOK: query: CREATE TABLE T4_n17(key STRING, val STRING) STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@t4_n17 +PREHOOK: query: CREATE TABLE dest_j1_n17(key INT, value STRING) STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@dest_j1_n17 +POSTHOOK: query: CREATE TABLE dest_j1_n17(key INT, value STRING) STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@dest_j1_n17 +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n128 +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@t1_n128 +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n128 +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@t1_n128 +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n76 +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@t2_n76 +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n76 +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@t2_n76 +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3_n30 +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@t3_n30 +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3_n30 +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@t3_n30 +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T4_n17 +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@t4_n17 +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T4_n17 +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@t4_n17 +PREHOOK: query: EXPLAIN +FROM src src1 JOIN src src2 ON (src1.key = src2.key) +INSERT OVERWRITE TABLE dest_j1_n17 SELECT src1.key, src2.value +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@dest_j1_n17 +POSTHOOK: query: EXPLAIN +FROM src src1 JOIN src src2 ON (src1.key = src2.key) +INSERT OVERWRITE TABLE dest_j1_n17 SELECT src1.key, src2.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@dest_j1_n17 +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-6 depends on stages: Stage-1 , consists of Stage-7, Stage-0, Stage-3 + Stage-7 + Stage-5 depends on stages: Stage-7 + Stage-0 depends on stages: Stage-5 + Stage-2 depends on stages: Stage-0, Stage-3 + Stage-3 depends on stages: Stage-5 + +STAGE PLANS: + 
Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + TableScan + alias: src2 + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + handleSkewJoin: true + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0, _col2 + Statistics: Num rows: 791 Data size: 140798 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: UDFToInteger(_col0) (type: int), _col2 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 791 Data size: 75145 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 791 Data size: 75145 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1_n17 + Select Operator + expressions: _col0 (type: int), _col1 (type: string) + outputColumnNames: key, value + Statistics: Num rows: 791 Data size: 75145 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll') + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-6 + Conditional Operator + + Stage: Stage-7 + Map Reduce Local Work + Alias -> Map Local Tables: + 1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + 1 + TableScan + HashTable Sink Operator + keys: + 0 reducesinkkey0 (type: string) + 1 reducesinkkey0 (type: string) + + Stage: Stage-5 + Map Reduce + Map Operator Tree: + TableScan + Map Join Operator + condition map: + Inner 
Join 0 to 1 + keys: + 0 reducesinkkey0 (type: string) + 1 reducesinkkey0 (type: string) + outputColumnNames: _col0, _col2 + Select Operator + expressions: UDFToInteger(_col0) (type: int), _col2 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 791 Data size: 75145 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 791 Data size: 75145 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1_n17 + Select Operator + expressions: _col0 (type: int), _col1 (type: string) + outputColumnNames: key, value + Statistics: Num rows: 791 Data size: 75145 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll') + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Move Operator + tables: + replace: true + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1_n17 + + Stage: Stage-2 + Stats Work + Basic Stats Work: + Column Stats Desc: + Columns: key, value + Column Types: int, string + Table: default.dest_j1_n17 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + null sort order: + sort order: + Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: struct), _col1 (type: struct) + Reduce Operator Tree: + Group By Operator + aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + +PREHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) +INSERT OVERWRITE TABLE dest_j1_n17 SELECT src1.key, src2.value +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@dest_j1_n17 +POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) +INSERT OVERWRITE TABLE dest_j1_n17 SELECT src1.key, src2.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@dest_j1_n17 +POSTHOOK: Lineage: dest_j1_n17.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1_n17.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: SELECT sum(hash(key)), sum(hash(value)) FROM dest_j1_n17 +PREHOOK: type: QUERY +PREHOOK: Input: 
default@dest_j1_n17 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(key)), sum(hash(value)) FROM dest_j1_n17 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@dest_j1_n17 +#### A masked pattern was here #### +278697 101852390308 +PREHOOK: query: EXPLAIN +SELECT /*+ STREAMTABLE(a) */ * +FROM T1_n128 a JOIN T2_n76 b ON a.key = b.key + JOIN T3_n30 c ON b.key = c.key + JOIN T4_n17 d ON c.key = d.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +PREHOOK: Input: default@t2_n76 +PREHOOK: Input: default@t3_n30 +PREHOOK: Input: default@t4_n17 +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN +SELECT /*+ STREAMTABLE(a) */ * +FROM T1_n128 a JOIN T2_n76 b ON a.key = b.key + JOIN T3_n30 c ON b.key = c.key + JOIN T4_n17 d ON c.key = d.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +POSTHOOK: Input: default@t2_n76 +POSTHOOK: Input: default@t3_n30 +POSTHOOK: Input: default@t4_n17 +#### A masked pattern was here #### +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-3 depends on stages: Stage-2 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: a + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: key (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: key (type: string) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + value expressions: val (type: string) + TableScan + alias: b + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: key (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: key (type: string) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + value expressions: val (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 key (type: string) + 1 key (type: string) + outputColumnNames: _col0, _col1, _col5, _col6 + Statistics: Num rows: 1 Data size: 404 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col5 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col5 (type: string) + Statistics: Num rows: 1 Data size: 404 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: string), _col1 (type: string), _col6 (type: string) + TableScan + alias: c + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Reduce Output 
Operator + key expressions: key (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: key (type: string) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + value expressions: val (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col5 (type: string) + 1 key (type: string) + outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11 + Statistics: Num rows: 1 Data size: 444 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col10 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col10 (type: string) + Statistics: Num rows: 1 Data size: 444 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string), _col11 (type: string) + TableScan + alias: d + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: key (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: key (type: string) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + value expressions: val (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col10 (type: string) + 1 key (type: string) + outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11, _col15, _col16 + Statistics: Num rows: 1 Data size: 488 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string), _col10 (type: string), _col11 (type: string), _col15 (type: string), _col16 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + Statistics: Num rows: 1 Data size: 488 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 488 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT /*+ STREAMTABLE(a) */ * +FROM T1_n128 a JOIN T2_n76 b ON a.key = b.key + JOIN T3_n30 c ON b.key = c.key + JOIN T4_n17 d ON c.key = d.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +PREHOOK: Input: default@t2_n76 +PREHOOK: Input: default@t3_n30 +PREHOOK: Input: default@t4_n17 +#### A masked pattern was here #### +POSTHOOK: query: SELECT /*+ STREAMTABLE(a) */ * +FROM T1_n128 a JOIN T2_n76 b ON a.key = b.key + JOIN T3_n30 c ON b.key = c.key + JOIN T4_n17 d ON c.key = d.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +POSTHOOK: Input: default@t2_n76 +POSTHOOK: Input: default@t3_n30 +POSTHOOK: Input: default@t4_n17 +#### A 
masked pattern was here #### +2 12 2 22 2 12 2 12 +PREHOOK: query: EXPLAIN +SELECT /*+ STREAMTABLE(a,c) */ * +FROM T1_n128 a JOIN T2_n76 b ON a.key = b.key + JOIN T3_n30 c ON b.key = c.key + JOIN T4_n17 d ON c.key = d.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +PREHOOK: Input: default@t2_n76 +PREHOOK: Input: default@t3_n30 +PREHOOK: Input: default@t4_n17 +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN +SELECT /*+ STREAMTABLE(a,c) */ * +FROM T1_n128 a JOIN T2_n76 b ON a.key = b.key + JOIN T3_n30 c ON b.key = c.key + JOIN T4_n17 d ON c.key = d.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +POSTHOOK: Input: default@t2_n76 +POSTHOOK: Input: default@t3_n30 +POSTHOOK: Input: default@t4_n17 +#### A masked pattern was here #### +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-8 depends on stages: Stage-2 , consists of Stage-9, Stage-3 + Stage-9 + Stage-7 depends on stages: Stage-9 + Stage-3 depends on stages: Stage-7 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: a + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: key (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: key (type: string) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + value expressions: val (type: string) + TableScan + alias: b + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: key (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: key (type: string) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + value expressions: val (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 key (type: string) + 1 key (type: string) + outputColumnNames: _col0, _col1, _col5, _col6 + Statistics: Num rows: 1 Data size: 404 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col5 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col5 (type: string) + Statistics: Num rows: 1 Data size: 404 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: string), _col1 (type: string), _col6 (type: string) + TableScan + alias: c + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: key (type: string) + null sort order: z + sort order: 
+ + Map-reduce partition columns: key (type: string) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + value expressions: val (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + handleSkewJoin: true + keys: + 0 _col5 (type: string) + 1 key (type: string) + outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11 + Statistics: Num rows: 1 Data size: 444 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-8 + Conditional Operator + + Stage: Stage-9 + Map Reduce Local Work + Alias -> Map Local Tables: + 1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + 1 + TableScan + HashTable Sink Operator + keys: + 0 reducesinkkey0 (type: string) + 1 reducesinkkey0 (type: string) + + Stage: Stage-7 + Map Reduce + Map Operator Tree: + TableScan + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 reducesinkkey0 (type: string) + 1 reducesinkkey0 (type: string) + outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11 + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col10 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col10 (type: string) + Statistics: Num rows: 1 Data size: 444 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string), _col11 (type: string) + TableScan + alias: d + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: key (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: key (type: string) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + value expressions: val (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col10 (type: string) + 1 key (type: string) + outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11, _col15, _col16 + Statistics: Num rows: 1 Data size: 488 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string), _col10 (type: string), _col11 (type: string), _col15 (type: string), _col16 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + Statistics: Num rows: 1 Data size: 488 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 488 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT /*+ STREAMTABLE(a,c) */ * +FROM T1_n128 a JOIN T2_n76 b ON a.key = b.key + JOIN T3_n30 c ON b.key = c.key + JOIN T4_n17 d ON c.key = d.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +PREHOOK: Input: default@t2_n76 +PREHOOK: Input: default@t3_n30 +PREHOOK: Input: default@t4_n17 +#### A masked pattern was here #### +POSTHOOK: query: SELECT /*+ STREAMTABLE(a,c) */ * +FROM T1_n128 a JOIN T2_n76 b ON a.key = b.key + JOIN T3_n30 c ON b.key = c.key + JOIN T4_n17 d ON c.key = d.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +POSTHOOK: Input: default@t2_n76 +POSTHOOK: Input: default@t3_n30 +POSTHOOK: Input: default@t4_n17 +#### A masked pattern was here #### +2 12 2 22 2 12 2 12 +PREHOOK: query: EXPLAIN FROM T1_n128 a JOIN src c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN FROM T1_n128 a JOIN src c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: a + filterExpr: UDFToDouble(key) is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: UDFToDouble(key) is not null (type: boolean) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: UDFToDouble(key) (type: double) + null sort order: z + sort order: + + Map-reduce partition columns: UDFToDouble(key) (type: double) + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + value expressions: key (type: string), val (type: string) + TableScan + alias: c + filterExpr: (key + 1) is not null (type: boolean) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: (key + 1) is not null (type: boolean) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: (key + 1) (type: double) + null sort order: z + sort order: + + Map-reduce partition columns: (key + 1) (type: double) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: key (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 UDFToDouble(key) (type: double) + 1 (key + 1) (type: double) + outputColumnNames: _col0, _col1, _col5 + Statistics: Num rows: 550 Data size: 47850 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(hash(_col0)), sum(hash(_col1)), sum(hash(_col5)) + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + null sort order: + sort order: + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), sum(VALUE._col1), sum(VALUE._col2) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: FROM T1_n128 a JOIN src c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +POSTHOOK: query: FROM T1_n128 a JOIN src c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +198 6274 194 +PREHOOK: query: EXPLAIN FROM +(SELECT src.* FROM src) x +JOIN +(SELECT src.* FROM src) Y +ON (x.key = Y.key) +SELECT sum(hash(Y.key)), sum(hash(Y.value)) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN FROM +(SELECT src.* FROM src) x +JOIN +(SELECT src.* FROM src) Y +ON (x.key = Y.key) +SELECT sum(hash(Y.key)), sum(hash(Y.value)) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-5 depends on stages: Stage-1 , consists of Stage-6, Stage-2 + Stage-6 + Stage-4 depends on stages: Stage-6 + Stage-2 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + TableScan + alias: src + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: 
_col0, _col1 + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + handleSkewJoin: true + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col2, _col3 + Statistics: Num rows: 791 Data size: 140798 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(hash(_col2)), sum(hash(_col3)) + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-5 + Conditional Operator + + Stage: Stage-6 + Map Reduce Local Work + Alias -> Map Local Tables: + 1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + 1 + TableScan + HashTable Sink Operator + keys: + 0 reducesinkkey0 (type: string) + 1 reducesinkkey0 (type: string) + + Stage: Stage-4 + Map Reduce + Map Operator Tree: + TableScan + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 reducesinkkey0 (type: string) + 1 reducesinkkey0 (type: string) + outputColumnNames: _col2, _col3 + Group By Operator + aggregations: sum(hash(_col2)), sum(hash(_col3)) + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + null sort order: + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), sum(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: FROM +(SELECT src.* FROM src) x +JOIN +(SELECT src.* FROM src) Y +ON (x.key = Y.key) +SELECT sum(hash(Y.key)), sum(hash(Y.value)) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: FROM +(SELECT src.* FROM src) x +JOIN +(SELECT src.* FROM src) Y +ON (x.key = Y.key) +SELECT sum(hash(Y.key)), sum(hash(Y.value)) +POSTHOOK: type: QUERY 
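The plans for the sub-query joins here all carry handleSkewJoin: true together with a Conditional Operator stage (Stage-5) and a backup Map Reduce Local Work / Map Join pair (Stage-6 and Stage-4). That is Hive's runtime skew join: during the reduce-side join, keys whose row count crosses a threshold are spilled to disk and joined afterwards in the conditional map-join stage instead of overloading a single reducer. A minimal sketch of the session settings that produce plans of this shape — the query is the one from the output above, and the threshold value shown is illustrative rather than what the test harness sets:

    -- Enable runtime skew-join handling for reduce-side joins.
    SET hive.optimize.skewjoin = true;
    -- A key is treated as skewed once a reducer sees more than this many rows for it.
    SET hive.skewjoin.key = 100000;

    -- Same shape as the query above; skewed keys are deferred to the
    -- conditional map-join stage (Stage-6/Stage-4 in the plan).
    SELECT sum(hash(y.key)), sum(hash(y.value))
    FROM (SELECT src.* FROM src) x
    JOIN (SELECT src.* FROM src) y ON (x.key = y.key);
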
+POSTHOOK: Input: default@src +#### A masked pattern was here #### +44481300 101852390308 +PREHOOK: query: EXPLAIN FROM +(SELECT src.* FROM src) x +JOIN +(SELECT src.* FROM src) Y +ON (x.key = Y.key and substring(x.value, 5)=substring(y.value, 5)+1) +SELECT sum(hash(Y.key)), sum(hash(Y.value)) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN FROM +(SELECT src.* FROM src) x +JOIN +(SELECT src.* FROM src) Y +ON (x.key = Y.key and substring(x.value, 5)=substring(y.value, 5)+1) +SELECT sum(hash(Y.key)), sum(hash(Y.value)) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-5 depends on stages: Stage-1 , consists of Stage-6, Stage-2 + Stage-6 + Stage-4 depends on stages: Stage-6 + Stage-2 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src + filterExpr: (key is not null and UDFToDouble(substring(value, 5)) is not null) (type: boolean) + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: (key is not null and UDFToDouble(substring(value, 5)) is not null) (type: boolean) + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string), UDFToDouble(substring(_col1, 5)) (type: double) + null sort order: zz + sort order: ++ + Map-reduce partition columns: _col0 (type: string), UDFToDouble(substring(_col1, 5)) (type: double) + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + TableScan + alias: src + filterExpr: (key is not null and (substring(value, 5) + 1) is not null) (type: boolean) + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: (key is not null and (substring(value, 5) + 1) is not null) (type: boolean) + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string), (substring(_col1, 5) + 1) (type: double) + null sort order: zz + sort order: ++ + Map-reduce partition columns: _col0 (type: string), (substring(_col1, 5) + 1) (type: double) + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + handleSkewJoin: true + keys: + 0 _col0 (type: string), UDFToDouble(substring(_col1, 5)) (type: double) + 1 _col0 (type: string), (substring(_col1, 5) + 1) (type: double) + outputColumnNames: _col2, _col3 + Statistics: Num rows: 791 Data size: 140798 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(hash(_col2)), sum(hash(_col3)) + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + 
compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-5 + Conditional Operator + + Stage: Stage-6 + Map Reduce Local Work + Alias -> Map Local Tables: + 1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + 1 + TableScan + HashTable Sink Operator + keys: + 0 reducesinkkey0 (type: string), reducesinkkey1 (type: double) + 1 reducesinkkey0 (type: string), reducesinkkey1 (type: double) + + Stage: Stage-4 + Map Reduce + Map Operator Tree: + TableScan + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 reducesinkkey0 (type: string), reducesinkkey1 (type: double) + 1 reducesinkkey0 (type: string), reducesinkkey1 (type: double) + outputColumnNames: _col2, _col3 + Group By Operator + aggregations: sum(hash(_col2)), sum(hash(_col3)) + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + null sort order: + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), sum(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: FROM +(SELECT src.* FROM src) x +JOIN +(SELECT src.* FROM src) Y +ON (x.key = Y.key and substring(x.value, 5)=substring(y.value, 5)+1) +SELECT sum(hash(Y.key)), sum(hash(Y.value)) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: FROM +(SELECT src.* FROM src) x +JOIN +(SELECT src.* FROM src) Y +ON (x.key = Y.key and substring(x.value, 5)=substring(y.value, 5)+1) +SELECT sum(hash(Y.key)), sum(hash(Y.value)) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +NULL NULL +PREHOOK: query: EXPLAIN +SELECT sum(hash(src1.c1)), sum(hash(src2.c4)) +FROM +(SELECT src.key as c1, src.value as c2 from src) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src) src2 +ON src1.c1 = src2.c3 AND src1.c1 < 100 +JOIN +(SELECT src.key as c5, src.value as c6 from src) src3 +ON src1.c1 = src3.c5 AND src3.c5 < 80 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN +SELECT sum(hash(src1.c1)), sum(hash(src2.c4)) +FROM +(SELECT src.key as c1, src.value as c2 from src) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src) src2 +ON src1.c1 
= src2.c3 AND src1.c1 < 100 +JOIN +(SELECT src.key as c5, src.value as c6 from src) src3 +ON src1.c1 = src3.c5 AND src3.c5 < 80 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-9 depends on stages: Stage-1 , consists of Stage-11, Stage-2 + Stage-11 + Stage-8 depends on stages: Stage-11 + Stage-2 depends on stages: Stage-8 + Stage-7 depends on stages: Stage-2 , consists of Stage-10, Stage-3 + Stage-10 + Stage-6 depends on stages: Stage-10 + Stage-3 depends on stages: Stage-6 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src + filterExpr: ((key < 100) and (key < 80)) (type: boolean) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: ((key < 100) and (key < 80)) (type: boolean) + Statistics: Num rows: 55 Data size: 4785 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 55 Data size: 4785 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 55 Data size: 4785 Basic stats: COMPLETE Column stats: COMPLETE + TableScan + alias: src + filterExpr: ((key < 100) and (key < 80)) (type: boolean) + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: ((key < 100) and (key < 80)) (type: boolean) + Statistics: Num rows: 55 Data size: 9790 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 55 Data size: 9790 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 55 Data size: 9790 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + handleSkewJoin: true + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0, _col3 + Statistics: Num rows: 86 Data size: 15308 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-9 + Conditional Operator + + Stage: Stage-11 + Map Reduce Local Work + Alias -> Map Local Tables: + 1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + 1 + TableScan + HashTable Sink Operator + keys: + 0 reducesinkkey0 (type: string) + 1 reducesinkkey0 (type: string) + + Stage: Stage-8 + Map Reduce + Map Operator Tree: + TableScan + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 reducesinkkey0 (type: string) + 1 reducesinkkey0 (type: string) + outputColumnNames: _col0, _col3 + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 86 Data size: 15308 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col3 (type: string) + TableScan + alias: src + filterExpr: (key < 80) (type: boolean) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: (key < 80) (type: boolean) + Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + null sort order: z + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + handleSkewJoin: true + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0, _col3 + Statistics: Num rows: 135 Data size: 24030 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(hash(_col0)), sum(hash(_col3)) + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-7 + Conditional Operator + + Stage: Stage-10 + Map Reduce Local Work + Alias -> Map Local Tables: + 1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + 1 + TableScan + HashTable Sink Operator + keys: + 0 reducesinkkey0 (type: string) + 1 reducesinkkey0 (type: string) + + Stage: Stage-6 + Map Reduce + Map Operator Tree: + TableScan + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 reducesinkkey0 (type: string) + 1 reducesinkkey0 (type: string) + outputColumnNames: _col0, _col3 + Group By Operator + aggregations: sum(hash(_col0)), sum(hash(_col3)) + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + null sort order: + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), sum(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + 
Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT sum(hash(src1.c1)), sum(hash(src2.c4)) +FROM +(SELECT src.key as c1, src.value as c2 from src) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src) src2 +ON src1.c1 = src2.c3 AND src1.c1 < 100 +JOIN +(SELECT src.key as c5, src.value as c6 from src) src3 +ON src1.c1 = src3.c5 AND src3.c5 < 80 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(src1.c1)), sum(hash(src2.c4)) +FROM +(SELECT src.key as c1, src.value as c2 from src) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src) src2 +ON src1.c1 = src2.c3 AND src1.c1 < 100 +JOIN +(SELECT src.key as c5, src.value as c6 from src) src3 +ON src1.c1 = src3.c5 AND src3.c5 < 80 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +293143 -136853010385 +PREHOOK: query: EXPLAIN +SELECT /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) FROM T1_n128 k LEFT OUTER JOIN T1_n128 v ON k.key+1=v.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN +SELECT /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) FROM T1_n128 k LEFT OUTER JOIN T1_n128 v ON k.key+1=v.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +STAGE DEPENDENCIES: + Stage-3 is a root stage + Stage-1 depends on stages: Stage-3 + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-3 + Map Reduce Local Work + Alias -> Map Local Tables: + v + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + v + TableScan + alias: v + Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 (key + 1) (type: double) + 1 UDFToDouble(key) (type: double) + + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: k + Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 (key + 1) (type: double) + 1 UDFToDouble(key) (type: double) + outputColumnNames: _col0, _col6 + Statistics: Num rows: 1 Data size: 202 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(hash(_col0)), sum(hash(_col6)) + minReductionHashAggr: 0.99 + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + null sort order: + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint), _col1 (type: bigint) + Local Work: + Map Reduce Local Work + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), sum(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) FROM T1_n128 k LEFT OUTER JOIN T1_n128 v ON k.key+1=v.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +POSTHOOK: query: SELECT /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) FROM T1_n128 k LEFT OUTER JOIN T1_n128 v ON k.key+1=v.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +372 6320 +PREHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1_n128 k join T1_n128 v on k.key=v.val +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +POSTHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1_n128 k join T1_n128 v on k.key=v.val +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +NULL NULL +PREHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1_n128 k join T1_n128 v on k.key=v.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +POSTHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1_n128 k join T1_n128 v on k.key=v.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +429 12643 +PREHOOK: query: select sum(hash(k.key)), sum(hash(v.val)) from T1_n128 k join T1_n128 v on k.key=v.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +POSTHOOK: query: select sum(hash(k.key)), sum(hash(v.val)) from T1_n128 k join T1_n128 v on k.key=v.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +429 12643 +PREHOOK: query: select count(1) from T1_n128 a join T1_n128 b on a.key = b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +POSTHOOK: query: select count(1) from T1_n128 a join T1_n128 b on a.key = b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +8 +PREHOOK: query: FROM T1_n128 a LEFT OUTER JOIN T2_n76 c ON c.key+1=a.key SELECT sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +PREHOOK: Input: default@t2_n76 +#### A masked pattern was here #### +POSTHOOK: query: FROM T1_n128 a LEFT OUTER JOIN T2_n76 c ON c.key+1=a.key SELECT sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +POSTHOOK: Input: default@t2_n76 +#### A masked pattern was here #### +317 9462 50 +PREHOOK: query: FROM T1_n128 a RIGHT OUTER JOIN T2_n76 c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +PREHOOK: Input: default@t2_n76 +#### A masked pattern was here #### +POSTHOOK: query: FROM T1_n128 a RIGHT OUTER JOIN T2_n76 c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +POSTHOOK: Input: default@t2_n76 +#### A masked pattern was here #### +51 1570 318 +PREHOOK: query: FROM T1_n128 a FULL OUTER JOIN T2_n76 c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), 
sum(hash(c.key)) +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +PREHOOK: Input: default@t2_n76 +#### A masked pattern was here #### +POSTHOOK: query: FROM T1_n128 a FULL OUTER JOIN T2_n76 c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +POSTHOOK: Input: default@t2_n76 +#### A masked pattern was here #### +317 9462 318 +PREHOOK: query: SELECT sum(hash(src1.key)), sum(hash(src1.val)), sum(hash(src2.key)) FROM T1_n128 src1 LEFT OUTER JOIN T2_n76 src2 ON src1.key+1 = src2.key RIGHT OUTER JOIN T2_n76 src3 ON src2.key = src3.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +PREHOOK: Input: default@t2_n76 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(src1.key)), sum(hash(src1.val)), sum(hash(src2.key)) FROM T1_n128 src1 LEFT OUTER JOIN T2_n76 src2 ON src1.key+1 = src2.key RIGHT OUTER JOIN T2_n76 src3 ON src2.key = src3.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +POSTHOOK: Input: default@t2_n76 +#### A masked pattern was here #### +370 11003 377 +PREHOOK: query: SELECT sum(hash(src1.key)), sum(hash(src1.val)), sum(hash(src2.key)) FROM T1_n128 src1 JOIN T2_n76 src2 ON src1.key+1 = src2.key JOIN T2_n76 src3 ON src2.key = src3.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +PREHOOK: Input: default@t2_n76 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(src1.key)), sum(hash(src1.val)), sum(hash(src2.key)) FROM T1_n128 src1 JOIN T2_n76 src2 ON src1.key+1 = src2.key JOIN T2_n76 src3 ON src2.key = src3.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +POSTHOOK: Input: default@t2_n76 +#### A masked pattern was here #### +370 11003 377 +PREHOOK: query: select /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) from T1_n128 k left outer join T1_n128 v on k.key+1=v.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +POSTHOOK: query: select /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) from T1_n128 k left outer join T1_n128 v on k.key+1=v.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n128 +#### A masked pattern was here #### +372 6320 diff --git ql/src/test/results/clientpositive/skewjoin_mapjoin1.q.out ql/src/test/results/clientpositive/skewjoin_mapjoin1.q.out index 537085f80b..35ee1fe165 100644 --- ql/src/test/results/clientpositive/skewjoin_mapjoin1.q.out +++ ql/src/test/results/clientpositive/skewjoin_mapjoin1.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n67(key STRING, val STRING) SKEWED BY (key) ON ((2)) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n67 +PREHOOK: Output: default@t1_n67 POSTHOOK: query: CREATE TABLE T1_n67(key STRING, val STRING) SKEWED BY (key) ON ((2)) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n67 +POSTHOOK: Output: default@t1_n67 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n67 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n40(key STRING, val STRING) SKEWED BY (key) ON ((3)) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n40 +PREHOOK: Output: default@t2_n40 POSTHOOK: query: CREATE TABLE T2_n40(key STRING, val STRING) SKEWED BY (key) ON ((3)) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: 
database:default -POSTHOOK: Output: default@T2_n40 +POSTHOOK: Output: default@t2_n40 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n40 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/skewjoin_mapjoin10.q.out ql/src/test/results/clientpositive/skewjoin_mapjoin10.q.out index e6bd67a78b..ade6f5d569 100644 --- ql/src/test/results/clientpositive/skewjoin_mapjoin10.q.out +++ ql/src/test/results/clientpositive/skewjoin_mapjoin10.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE tmpT1_n0(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@tmpT1_n0 +PREHOOK: Output: default@tmpt1_n0 POSTHOOK: query: CREATE TABLE tmpT1_n0(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@tmpT1_n0 +POSTHOOK: Output: default@tmpt1_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE tmpT1_n0 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -17,11 +17,11 @@ POSTHOOK: Output: default@tmpt1_n0 PREHOOK: query: CREATE TABLE T1_n151(key INT, val STRING) SKEWED BY (key) ON ((2)) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n151 +PREHOOK: Output: default@t1_n151 POSTHOOK: query: CREATE TABLE T1_n151(key INT, val STRING) SKEWED BY (key) ON ((2)) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n151 +POSTHOOK: Output: default@t1_n151 PREHOOK: query: INSERT OVERWRITE TABLE T1_n151 SELECT key, val FROM tmpT1_n0 PREHOOK: type: QUERY PREHOOK: Input: default@tmpt1_n0 @@ -35,11 +35,11 @@ POSTHOOK: Lineage: t1_n151.val SIMPLE [(tmpt1_n0)tmpt1_n0.FieldSchema(name:val, PREHOOK: query: CREATE TABLE tmpT2_n0(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@tmpT2_n0 +PREHOOK: Output: default@tmpt2_n0 POSTHOOK: query: CREATE TABLE tmpT2_n0(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@tmpT2_n0 +POSTHOOK: Output: default@tmpt2_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE tmpT2_n0 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -51,11 +51,11 @@ POSTHOOK: Output: default@tmpt2_n0 PREHOOK: query: CREATE TABLE T2_n88(key INT, val STRING) SKEWED BY (key) ON ((3)) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n88 +PREHOOK: Output: default@t2_n88 POSTHOOK: query: CREATE TABLE T2_n88(key INT, val STRING) SKEWED BY (key) ON ((3)) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n88 +POSTHOOK: Output: default@t2_n88 PREHOOK: query: INSERT OVERWRITE TABLE T2_n88 SELECT key, val FROM tmpT2_n0 PREHOOK: type: QUERY PREHOOK: Input: default@tmpt2_n0 diff --git ql/src/test/results/clientpositive/skewjoin_mapjoin11.q.out ql/src/test/results/clientpositive/skewjoin_mapjoin11.q.out index 5fc405ba9a..d9f4d09479 100644 --- ql/src/test/results/clientpositive/skewjoin_mapjoin11.q.out +++ ql/src/test/results/clientpositive/skewjoin_mapjoin11.q.out @@ -3,13 +3,13 @@ CLUSTERED BY (key) INTO 4 BUCKETS SKEWED BY (key) ON ((2)) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n31 +PREHOOK: Output: default@t1_n31 POSTHOOK: query: CREATE TABLE 
T1_n31(key STRING, val STRING) CLUSTERED BY (key) INTO 4 BUCKETS SKEWED BY (key) ON ((2)) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n31 +POSTHOOK: Output: default@t1_n31 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n31 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -21,11 +21,11 @@ POSTHOOK: Output: default@t1_n31 PREHOOK: query: CREATE TABLE T2_n21(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n21 +PREHOOK: Output: default@t2_n21 POSTHOOK: query: CREATE TABLE T2_n21(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n21 +POSTHOOK: Output: default@t2_n21 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n21 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/skewjoin_mapjoin2.q.out ql/src/test/results/clientpositive/skewjoin_mapjoin2.q.out index 0d770d9ab8..02bf21332b 100644 --- ql/src/test/results/clientpositive/skewjoin_mapjoin2.q.out +++ ql/src/test/results/clientpositive/skewjoin_mapjoin2.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n25(key STRING, val STRING) SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n25 +PREHOOK: Output: default@t1_n25 POSTHOOK: query: CREATE TABLE T1_n25(key STRING, val STRING) SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n25 +POSTHOOK: Output: default@t1_n25 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n25 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n16(key STRING, val STRING) SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n16 +PREHOOK: Output: default@t2_n16 POSTHOOK: query: CREATE TABLE T2_n16(key STRING, val STRING) SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n16 +POSTHOOK: Output: default@t2_n16 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n16 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/skewjoin_mapjoin3.q.out ql/src/test/results/clientpositive/skewjoin_mapjoin3.q.out index 42c9319940..a956faeb88 100644 --- ql/src/test/results/clientpositive/skewjoin_mapjoin3.q.out +++ ql/src/test/results/clientpositive/skewjoin_mapjoin3.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n155(key STRING, val STRING) SKEWED BY (key, val) ON ((2, 12), (8, 18)) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n155 +PREHOOK: Output: default@t1_n155 POSTHOOK: query: CREATE TABLE T1_n155(key STRING, val STRING) SKEWED BY (key, val) ON ((2, 12), (8, 18)) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n155 +POSTHOOK: Output: default@t1_n155 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n155 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -20,12 +20,12 @@ PREHOOK: 
query: CREATE TABLE T2_n91(key STRING, val STRING) SKEWED BY (key, val) ON ((3, 13), (8, 18)) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n91 +PREHOOK: Output: default@t2_n91 POSTHOOK: query: CREATE TABLE T2_n91(key STRING, val STRING) SKEWED BY (key, val) ON ((3, 13), (8, 18)) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n91 +POSTHOOK: Output: default@t2_n91 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n91 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/skewjoin_mapjoin4.q.out ql/src/test/results/clientpositive/skewjoin_mapjoin4.q.out index 9c8fa2ce73..aff3842fab 100644 --- ql/src/test/results/clientpositive/skewjoin_mapjoin4.q.out +++ ql/src/test/results/clientpositive/skewjoin_mapjoin4.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n124(key STRING, val STRING) SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n124 +PREHOOK: Output: default@t1_n124 POSTHOOK: query: CREATE TABLE T1_n124(key STRING, val STRING) SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n124 +POSTHOOK: Output: default@t1_n124 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n124 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n73(key STRING, val STRING) SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n73 +PREHOOK: Output: default@t2_n73 POSTHOOK: query: CREATE TABLE T2_n73(key STRING, val STRING) SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n73 +POSTHOOK: Output: default@t2_n73 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n73 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -37,11 +37,11 @@ POSTHOOK: Output: default@t2_n73 PREHOOK: query: CREATE TABLE T3_n27(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n27 +PREHOOK: Output: default@t3_n27 POSTHOOK: query: CREATE TABLE T3_n27(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n27 +POSTHOOK: Output: default@t3_n27 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3_n27 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/skewjoin_mapjoin5.q.out ql/src/test/results/clientpositive/skewjoin_mapjoin5.q.out index 6796c3b830..8b8ffe2deb 100644 --- ql/src/test/results/clientpositive/skewjoin_mapjoin5.q.out +++ ql/src/test/results/clientpositive/skewjoin_mapjoin5.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n87(key STRING, val STRING) SKEWED BY (key) ON ((2)) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n87 +PREHOOK: Output: default@t1_n87 POSTHOOK: query: CREATE TABLE T1_n87(key STRING, val STRING) SKEWED BY (key) ON ((2)) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n87 
+POSTHOOK: Output: default@t1_n87 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n87 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -19,11 +19,11 @@ POSTHOOK: Output: default@t1_n87 PREHOOK: query: CREATE TABLE T2_n54(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n54 +PREHOOK: Output: default@t2_n54 POSTHOOK: query: CREATE TABLE T2_n54(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n54 +POSTHOOK: Output: default@t2_n54 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n54 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/skewjoin_mapjoin6.q.out ql/src/test/results/clientpositive/skewjoin_mapjoin6.q.out index bdebe7e34d..0b4d84d625 100644 --- ql/src/test/results/clientpositive/skewjoin_mapjoin6.q.out +++ ql/src/test/results/clientpositive/skewjoin_mapjoin6.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_n39(key STRING, value STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n39 +PREHOOK: Output: default@t1_n39 POSTHOOK: query: CREATE TABLE T1_n39(key STRING, value STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n39 +POSTHOOK: Output: default@t1_n39 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n39 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -21,11 +21,11 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: create table array_valued_T1_n39 (key string, value array) SKEWED BY (key) ON ((8)) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@array_valued_T1_n39 +PREHOOK: Output: default@array_valued_t1_n39 POSTHOOK: query: create table array_valued_T1_n39 (key string, value array) SKEWED BY (key) ON ((8)) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@array_valued_T1_n39 +POSTHOOK: Output: default@array_valued_t1_n39 PREHOOK: query: insert overwrite table array_valued_T1_n39 select key, array(value) from T1_n39 PREHOOK: type: QUERY PREHOOK: Input: default@t1_n39 diff --git ql/src/test/results/clientpositive/skewjoin_mapjoin7.q.out ql/src/test/results/clientpositive/skewjoin_mapjoin7.q.out index b62203aeca..bedd07ea8f 100644 --- ql/src/test/results/clientpositive/skewjoin_mapjoin7.q.out +++ ql/src/test/results/clientpositive/skewjoin_mapjoin7.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n157(key STRING, val STRING) SKEWED BY (key) ON ((2)) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n157 +PREHOOK: Output: default@t1_n157 POSTHOOK: query: CREATE TABLE T1_n157(key STRING, val STRING) SKEWED BY (key) ON ((2)) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n157 +POSTHOOK: Output: default@t1_n157 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n157 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -19,11 +19,11 @@ POSTHOOK: Output: default@t1_n157 PREHOOK: query: CREATE TABLE T2_n92(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n92 +PREHOOK: Output: default@t2_n92 POSTHOOK: query: 
CREATE TABLE T2_n92(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n92
+POSTHOOK: Output: default@t2_n92
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n92
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoin_mapjoin8.q.out ql/src/test/results/clientpositive/skewjoin_mapjoin8.q.out
index 131c1848a4..fe4d58ee62 100644
--- ql/src/test/results/clientpositive/skewjoin_mapjoin8.q.out
+++ ql/src/test/results/clientpositive/skewjoin_mapjoin8.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n29(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n29
+PREHOOK: Output: default@t1_n29
 POSTHOOK: query: CREATE TABLE T1_n29(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n29
+POSTHOOK: Output: default@t1_n29
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n29
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -17,11 +17,11 @@ POSTHOOK: Output: default@t1_n29
 PREHOOK: query: CREATE TABLE T2_n20(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n20
+PREHOOK: Output: default@t2_n20
 POSTHOOK: query: CREATE TABLE T2_n20(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n20
+POSTHOOK: Output: default@t2_n20
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n20
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -34,12 +34,12 @@ PREHOOK: query: CREATE TABLE T3_n7(key STRING, val STRING)
 SKEWED BY (val) ON ((12)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n7
+PREHOOK: Output: default@t3_n7
 POSTHOOK: query: CREATE TABLE T3_n7(key STRING, val STRING)
 SKEWED BY (val) ON ((12)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n7
+POSTHOOK: Output: default@t3_n7
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3_n7
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoin_mapjoin9.q.out ql/src/test/results/clientpositive/skewjoin_mapjoin9.q.out
index 5035aa48f4..f20fc9d333 100644
--- ql/src/test/results/clientpositive/skewjoin_mapjoin9.q.out
+++ ql/src/test/results/clientpositive/skewjoin_mapjoin9.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n152(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n152
+PREHOOK: Output: default@t1_n152
 POSTHOOK: query: CREATE TABLE T1_n152(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n152
+POSTHOOK: Output: default@t1_n152
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n152
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -19,11 +19,11 @@ POSTHOOK: Output: default@t1_n152
 PREHOOK: query: CREATE TABLE T2_n89(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n89
+PREHOOK: Output: default@t2_n89
 POSTHOOK: query: CREATE TABLE T2_n89(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n89
+POSTHOOK: Output: default@t2_n89
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n89
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -36,12 +36,12 @@ PREHOOK: query: CREATE TABLE T3_n36(key STRING, val STRING)
 SKEWED BY (val) ON ((12)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n36
+PREHOOK: Output: default@t3_n36
 POSTHOOK: query: CREATE TABLE T3_n36(key STRING, val STRING)
 SKEWED BY (val) ON ((12)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n36
+POSTHOOK: Output: default@t3_n36
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3_n36
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoin_noskew.q.out ql/src/test/results/clientpositive/skewjoin_noskew.q.out
index 7cd81466e6..e903e2ba1d 100644
--- ql/src/test/results/clientpositive/skewjoin_noskew.q.out
+++ ql/src/test/results/clientpositive/skewjoin_noskew.q.out
@@ -168,10 +168,10 @@ STAGE PLANS:
 Stage: Stage-9
 Create Table
 columns: key string, value string
- name: default.noskew
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.noskew

 Stage: Stage-3
 Stats Work
diff --git ql/src/test/results/clientpositive/skewjoin_onesideskew.q.out ql/src/test/results/clientpositive/skewjoin_onesideskew.q.out
index 75394dad06..df16b2d5db 100644
--- ql/src/test/results/clientpositive/skewjoin_onesideskew.q.out
+++ ql/src/test/results/clientpositive/skewjoin_onesideskew.q.out
@@ -234,10 +234,10 @@ STAGE PLANS:
 Stage: Stage-8
 Create Table
 columns: key string, value string
- name: default.result_n1
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.result_n1

 Stage: Stage-2
 Stats Work
diff --git ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out
index 54d401c330..9340d16d77 100644
--- ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out
+++ ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n57(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n57
+PREHOOK: Output: default@t1_n57
 POSTHOOK: query: CREATE TABLE T1_n57(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n57
+POSTHOOK: Output: default@t1_n57
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n57
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n35(key STRING, val STRING)
 SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n35
+PREHOOK: Output: default@t2_n35
 POSTHOOK: query: CREATE TABLE T2_n35(key STRING, val STRING)
 SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n35
+POSTHOOK: Output: default@t2_n35
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n35
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -342,11 +342,11 @@ NULL NULL 5 15
 PREHOOK: query: create table DEST1_n58(key1 STRING, val1 STRING, key2 STRING, val2 STRING)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n58
+PREHOOK: Output: default@dest1_n58
 POSTHOOK: query: create table DEST1_n58(key1 STRING, val1 STRING, key2 STRING, val2 STRING)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n58
+POSTHOOK: Output: default@dest1_n58
 PREHOOK: query: EXPLAIN
 INSERT OVERWRITE TABLE DEST1_n58
 SELECT * FROM T1_n57 a JOIN T2_n35 b ON a.key = b.key
diff --git ql/src/test/results/clientpositive/skewjoin_union_remove_2.q.out ql/src/test/results/clientpositive/skewjoin_union_remove_2.q.out
index 3793b85b43..29df6d1c71 100644
--- ql/src/test/results/clientpositive/skewjoin_union_remove_2.q.out
+++ ql/src/test/results/clientpositive/skewjoin_union_remove_2.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n8(key STRING, val STRING)
 SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n8
+PREHOOK: Output: default@t1_n8
 POSTHOOK: query: CREATE TABLE T1_n8(key STRING, val STRING)
 SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n8
+POSTHOOK: Output: default@t1_n8
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n8
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n4(key STRING, val STRING)
 SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n4
+PREHOOK: Output: default@t2_n4
 POSTHOOK: query: CREATE TABLE T2_n4(key STRING, val STRING)
 SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n4
+POSTHOOK: Output: default@t2_n4
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n4
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -37,11 +37,11 @@ POSTHOOK: Output: default@t2_n4
 PREHOOK: query: CREATE TABLE T3_n2(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n2
+PREHOOK: Output: default@t3_n2
 POSTHOOK: query: CREATE TABLE T3_n2(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n2
+POSTHOOK: Output: default@t3_n2
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3_n2
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt1.q.out ql/src/test/results/clientpositive/skewjoinopt1.q.out
index 642d841276..acf548e7d9 100644
--- ql/src/test/results/clientpositive/skewjoinopt1.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt1.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n101(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n101
+PREHOOK: Output: default@t1_n101
 POSTHOOK: query: CREATE TABLE T1_n101(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n101
+POSTHOOK: Output: default@t1_n101
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n101
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n64(key STRING, val STRING)
 SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n64
+PREHOOK: Output: default@t2_n64
 POSTHOOK: query: CREATE TABLE T2_n64(key STRING, val STRING)
 SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n64
+POSTHOOK: Output: default@t2_n64
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n64
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt10.q.out ql/src/test/results/clientpositive/skewjoinopt10.q.out
index 4122d65bff..a7947465d4 100644
--- ql/src/test/results/clientpositive/skewjoinopt10.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt10.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n88(key STRING, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n88
+PREHOOK: Output: default@t1_n88
 POSTHOOK: query: CREATE TABLE T1_n88(key STRING, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n88
+POSTHOOK: Output: default@t1_n88
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n88
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -21,11 +21,11 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: create table array_valued_T1_n0 (key string, value array<string>) SKEWED BY (key) ON ((8))
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@array_valued_T1_n0
+PREHOOK: Output: default@array_valued_t1_n0
 POSTHOOK: query: create table array_valued_T1_n0 (key string, value array<string>) SKEWED BY (key) ON ((8))
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@array_valued_T1_n0
+POSTHOOK: Output: default@array_valued_t1_n0
 PREHOOK: query: insert overwrite table array_valued_T1_n0 select key, array(value) from T1_n88
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1_n88
diff --git ql/src/test/results/clientpositive/skewjoinopt11.q.out ql/src/test/results/clientpositive/skewjoinopt11.q.out
index 609b04af66..dac7e2a101 100644
--- ql/src/test/results/clientpositive/skewjoinopt11.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt11.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n122(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n122
+PREHOOK: Output: default@t1_n122
 POSTHOOK: query: CREATE TABLE T1_n122(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n122
+POSTHOOK: Output: default@t1_n122
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n122
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -19,11 +19,11 @@ POSTHOOK: Output: default@t1_n122
 PREHOOK: query: CREATE TABLE T2_n72(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n72
+PREHOOK: Output: default@t2_n72
 POSTHOOK: query: CREATE TABLE T2_n72(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n72
+POSTHOOK: Output: default@t2_n72
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n72
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt12.q.out ql/src/test/results/clientpositive/skewjoinopt12.q.out
index e8e9d75a23..6fc34fca06 100644
--- ql/src/test/results/clientpositive/skewjoinopt12.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt12.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n159(key STRING, val STRING)
 SKEWED BY (key, val) ON ((2, 12), (8, 18)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n159
+PREHOOK: Output: default@t1_n159
 POSTHOOK: query: CREATE TABLE T1_n159(key STRING, val STRING)
 SKEWED BY (key, val) ON ((2, 12), (8, 18)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n159
+POSTHOOK: Output: default@t1_n159
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n159
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n93(key STRING, val STRING)
 SKEWED BY (key, val) ON ((3, 13), (8, 18)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n93
+PREHOOK: Output: default@t2_n93
 POSTHOOK: query: CREATE TABLE T2_n93(key STRING, val STRING)
 SKEWED BY (key, val) ON ((3, 13), (8, 18)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n93
+POSTHOOK: Output: default@t2_n93
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n93
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt13.q.out ql/src/test/results/clientpositive/skewjoinopt13.q.out
index 27b25dadbb..6689b14254 100644
--- ql/src/test/results/clientpositive/skewjoinopt13.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt13.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n38(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n38
+PREHOOK: Output: default@t1_n38
 POSTHOOK: query: CREATE TABLE T1_n38(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n38
+POSTHOOK: Output: default@t1_n38
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n38
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -17,11 +17,11 @@ POSTHOOK: Output: default@t1_n38
 PREHOOK: query: CREATE TABLE T2_n25(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n25
+PREHOOK: Output: default@t2_n25
 POSTHOOK: query: CREATE TABLE T2_n25(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n25
+POSTHOOK: Output: default@t2_n25
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n25
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -34,12 +34,12 @@ PREHOOK: query: CREATE TABLE T3_n9(key STRING, val STRING)
 SKEWED BY (val) ON ((12)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n9
+PREHOOK: Output: default@t3_n9
 POSTHOOK: query: CREATE TABLE T3_n9(key STRING, val STRING)
 SKEWED BY (val) ON ((12)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n9
+POSTHOOK: Output: default@t3_n9
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3_n9
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt14.q.out ql/src/test/results/clientpositive/skewjoinopt14.q.out
index 7b823c313a..354aa8421a 100644
--- ql/src/test/results/clientpositive/skewjoinopt14.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt14.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n65(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n65
+PREHOOK: Output: default@t1_n65
 POSTHOOK: query: CREATE TABLE T1_n65(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n65
+POSTHOOK: Output: default@t1_n65
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n65
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -19,11 +19,11 @@ POSTHOOK: Output: default@t1_n65
 PREHOOK: query: CREATE TABLE T2_n39(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n39
+PREHOOK: Output: default@t2_n39
 POSTHOOK: query: CREATE TABLE T2_n39(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n39
+POSTHOOK: Output: default@t2_n39
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n39
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -36,12 +36,12 @@ PREHOOK: query: CREATE TABLE T3_n14(key STRING, val STRING)
 SKEWED BY (val) ON ((12)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n14
+PREHOOK: Output: default@t3_n14
 POSTHOOK: query: CREATE TABLE T3_n14(key STRING, val STRING)
 SKEWED BY (val) ON ((12)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n14
+POSTHOOK: Output: default@t3_n14
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3_n14
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt16.q.out ql/src/test/results/clientpositive/skewjoinopt16.q.out
index b797a4e2a7..4f64974147 100644
--- ql/src/test/results/clientpositive/skewjoinopt16.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt16.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n154(key STRING, val STRING)
 SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n154
+PREHOOK: Output: default@t1_n154
 POSTHOOK: query: CREATE TABLE T1_n154(key STRING, val STRING)
 SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n154
+POSTHOOK: Output: default@t1_n154
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n154
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n90(key STRING, val STRING)
 SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n90
+PREHOOK: Output: default@t2_n90
 POSTHOOK: query: CREATE TABLE T2_n90(key STRING, val STRING)
 SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n90
+POSTHOOK: Output: default@t2_n90
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n90
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt17.q.out ql/src/test/results/clientpositive/skewjoinopt17.q.out
index 50c40ce06d..92954dfc05 100644
--- ql/src/test/results/clientpositive/skewjoinopt17.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt17.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n27(key STRING, val STRING)
 SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n27
+PREHOOK: Output: default@t1_n27
 POSTHOOK: query: CREATE TABLE T1_n27(key STRING, val STRING)
 SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n27
+POSTHOOK: Output: default@t1_n27
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n27
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n18(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n18
+PREHOOK: Output: default@t2_n18
 POSTHOOK: query: CREATE TABLE T2_n18(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n18
+POSTHOOK: Output: default@t2_n18
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n18
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -229,12 +229,12 @@ PREHOOK: query: CREATE TABLE T1_n27(key STRING, val STRING)
 SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n27
+PREHOOK: Output: default@t1_n27
 POSTHOOK: query: CREATE TABLE T1_n27(key STRING, val STRING)
 SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n27
+POSTHOOK: Output: default@t1_n27
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n27
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -247,12 +247,12 @@ PREHOOK: query: CREATE TABLE T2_n18(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n18
+PREHOOK: Output: default@t2_n18
 POSTHOOK: query: CREATE TABLE T2_n18(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n18
+POSTHOOK: Output: default@t2_n18
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n18
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt18.q.out ql/src/test/results/clientpositive/skewjoinopt18.q.out
index 22e98c4e86..a252c8f2a4 100644
--- ql/src/test/results/clientpositive/skewjoinopt18.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt18.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE tmpT1_n1(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@tmpT1_n1
+PREHOOK: Output: default@tmpt1_n1
 POSTHOOK: query: CREATE TABLE tmpT1_n1(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@tmpT1_n1
+POSTHOOK: Output: default@tmpt1_n1
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE tmpT1_n1
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -17,11 +17,11 @@ POSTHOOK: Output: default@tmpt1_n1
 PREHOOK: query: CREATE TABLE T1_n160(key INT, val STRING) SKEWED BY (key) ON ((2))
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n160
+PREHOOK: Output: default@t1_n160
 POSTHOOK: query: CREATE TABLE T1_n160(key INT, val STRING) SKEWED BY (key) ON ((2))
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n160
+POSTHOOK: Output: default@t1_n160
 PREHOOK: query: INSERT OVERWRITE TABLE T1_n160 SELECT key, val FROM tmpT1_n1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tmpt1_n1
@@ -36,12 +36,12 @@ PREHOOK: query: CREATE TABLE T2_n94(key STRING, val STRING)
 SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n94
+PREHOOK: Output: default@t2_n94
 POSTHOOK: query: CREATE TABLE T2_n94(key STRING, val STRING)
 SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n94
+POSTHOOK: Output: default@t2_n94
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n94
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt19.q.out ql/src/test/results/clientpositive/skewjoinopt19.q.out
index c1c842e758..ed41945b85 100644
--- ql/src/test/results/clientpositive/skewjoinopt19.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt19.q.out
@@ -3,13 +3,13 @@ CLUSTERED BY (key) INTO 4 BUCKETS
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n34
+PREHOOK: Output: default@t1_n34
 POSTHOOK: query: CREATE TABLE T1_n34(key STRING, val STRING)
 CLUSTERED BY (key) INTO 4 BUCKETS
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n34
+POSTHOOK: Output: default@t1_n34
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n34
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -21,11 +21,11 @@ POSTHOOK: Output: default@t1_n34
 PREHOOK: query: CREATE TABLE T2_n22(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n22
+PREHOOK: Output: default@t2_n22
 POSTHOOK: query: CREATE TABLE T2_n22(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n22
+POSTHOOK: Output: default@t2_n22
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n22
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt2.q.out ql/src/test/results/clientpositive/skewjoinopt2.q.out
index 5ffcddae04..29bf7032ca 100644
--- ql/src/test/results/clientpositive/skewjoinopt2.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt2.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n139(key STRING, val STRING)
 SKEWED BY (key) ON ((2), (7)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n139
+PREHOOK: Output: default@t1_n139
 POSTHOOK: query: CREATE TABLE T1_n139(key STRING, val STRING)
 SKEWED BY (key) ON ((2), (7)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n139
+POSTHOOK: Output: default@t1_n139
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n139
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n81(key STRING, val STRING)
 SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n81
+PREHOOK: Output: default@t2_n81
 POSTHOOK: query: CREATE TABLE T2_n81(key STRING, val STRING)
 SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n81
+POSTHOOK: Output: default@t2_n81
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n81
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt20.q.out ql/src/test/results/clientpositive/skewjoinopt20.q.out
index 0dca089cdf..33cd45fdf7 100644
--- ql/src/test/results/clientpositive/skewjoinopt20.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt20.q.out
@@ -3,13 +3,13 @@ CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n103
+PREHOOK: Output: default@t1_n103
 POSTHOOK: query: CREATE TABLE T1_n103(key STRING, val STRING)
 CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n103
+POSTHOOK: Output: default@t1_n103
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n103
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -21,11 +21,11 @@ POSTHOOK: Output: default@t1_n103
 PREHOOK: query: CREATE TABLE T2_n65(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n65
+PREHOOK: Output: default@t2_n65
 POSTHOOK: query: CREATE TABLE T2_n65(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n65
+POSTHOOK: Output: default@t2_n65
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n65
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt21.q.out ql/src/test/results/clientpositive/skewjoinopt21.q.out
index 3a0c59380f..0df52b72dc 100644
--- ql/src/test/results/clientpositive/skewjoinopt21.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt21.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n63(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n63
+PREHOOK: Output: default@t1_n63
 POSTHOOK: query: CREATE TABLE T1_n63(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n63
+POSTHOOK: Output: default@t1_n63
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n63
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n38(key STRING, val STRING)
 SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n38
+PREHOOK: Output: default@t2_n38
 POSTHOOK: query: CREATE TABLE T2_n38(key STRING, val STRING)
 SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n38
+POSTHOOK: Output: default@t2_n38
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n38
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt3.q.out ql/src/test/results/clientpositive/skewjoinopt3.q.out
index 8b1e3e5330..b05a995fe9 100644
--- ql/src/test/results/clientpositive/skewjoinopt3.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt3.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n12(key STRING, val STRING)
 SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n12
+PREHOOK: Output: default@t1_n12
 POSTHOOK: query: CREATE TABLE T1_n12(key STRING, val STRING)
 SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n12
+POSTHOOK: Output: default@t1_n12
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n12
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n7(key STRING, val STRING)
 SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n7
+PREHOOK: Output: default@t2_n7
 POSTHOOK: query: CREATE TABLE T2_n7(key STRING, val STRING)
 SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n7
+POSTHOOK: Output: default@t2_n7
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n7
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt4.q.out ql/src/test/results/clientpositive/skewjoinopt4.q.out
index 13bbd7d776..5e8f14ee9b 100644
--- ql/src/test/results/clientpositive/skewjoinopt4.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt4.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n52(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n52
+PREHOOK: Output: default@t1_n52
 POSTHOOK: query: CREATE TABLE T1_n52(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n52
+POSTHOOK: Output: default@t1_n52
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n52
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -19,11 +19,11 @@ POSTHOOK: Output: default@t1_n52
 PREHOOK: query: CREATE TABLE T2_n32(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n32
+PREHOOK: Output: default@t2_n32
 POSTHOOK: query: CREATE TABLE T2_n32(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n32
+POSTHOOK: Output: default@t2_n32
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n32
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt5.q.out ql/src/test/results/clientpositive/skewjoinopt5.q.out
index 99ec504122..c68291001e 100644
--- ql/src/test/results/clientpositive/skewjoinopt5.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt5.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n100(key STRING, val STRING)
 SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n100
+PREHOOK: Output: default@t1_n100
 POSTHOOK: query: CREATE TABLE T1_n100(key STRING, val STRING)
 SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n100
+POSTHOOK: Output: default@t1_n100
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n100
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n63(key STRING, val STRING)
 SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n63
+PREHOOK: Output: default@t2_n63
 POSTHOOK: query: CREATE TABLE T2_n63(key STRING, val STRING)
 SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n63
+POSTHOOK: Output: default@t2_n63
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n63
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt6.q.out ql/src/test/results/clientpositive/skewjoinopt6.q.out
index c72d47d7ab..f925d3aaa7 100644
--- ql/src/test/results/clientpositive/skewjoinopt6.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt6.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n130(key STRING, val STRING)
 SKEWED BY (key, val) ON ((2, 12), (8, 18)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n130
+PREHOOK: Output: default@t1_n130
 POSTHOOK: query: CREATE TABLE T1_n130(key STRING, val STRING)
 SKEWED BY (key, val) ON ((2, 12), (8, 18)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n130
+POSTHOOK: Output: default@t1_n130
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n130
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2_n77(key STRING, val STRING)
 SKEWED BY (key, val) ON ((3, 13), (8, 18)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n77
+PREHOOK: Output: default@t2_n77
 POSTHOOK: query: CREATE TABLE T2_n77(key STRING, val STRING)
 SKEWED BY (key, val) ON ((3, 13), (8, 18)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n77
+POSTHOOK: Output: default@t2_n77
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n77
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt7.q.out ql/src/test/results/clientpositive/skewjoinopt7.q.out
index 1d10d558ff..88947315b5 100644
--- ql/src/test/results/clientpositive/skewjoinopt7.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt7.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1(key STRING, val STRING)
 SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1
+PREHOOK: Output: default@t1
 POSTHOOK: query: CREATE TABLE T1(key STRING, val STRING)
 SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1
+POSTHOOK: Output: default@t1
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -20,12 +20,12 @@ PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
 SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2
+PREHOOK: Output: default@t2
 POSTHOOK: query: CREATE TABLE T2(key STRING, val STRING)
 SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2
+POSTHOOK: Output: default@t2
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -37,11 +37,11 @@ POSTHOOK: Output: default@t2
 PREHOOK: query: CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T3
+PREHOOK: Output: default@t3
 POSTHOOK: query: CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3
+POSTHOOK: Output: default@t3
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt8.q.out ql/src/test/results/clientpositive/skewjoinopt8.q.out
index ba0245e21e..3cd11c82ae 100644
--- ql/src/test/results/clientpositive/skewjoinopt8.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt8.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE T1_n140(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n140
+PREHOOK: Output: default@t1_n140
 POSTHOOK: query: CREATE TABLE T1_n140(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n140
+POSTHOOK: Output: default@t1_n140
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n140
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -18,12 +18,12 @@ PREHOOK: query: CREATE TABLE T2_n82(key STRING, val STRING)
 SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n82
+PREHOOK: Output: default@t2_n82
 POSTHOOK: query: CREATE TABLE T2_n82(key STRING, val STRING)
 SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n82
+POSTHOOK: Output: default@t2_n82
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n82
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -35,11 +35,11 @@ POSTHOOK: Output: default@t2_n82
 PREHOOK: query: CREATE TABLE T3_n33(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n33
+PREHOOK: Output: default@t3_n33
 POSTHOOK: query: CREATE TABLE T3_n33(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n33
+POSTHOOK: Output: default@t3_n33
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3_n33
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/skewjoinopt9.q.out ql/src/test/results/clientpositive/skewjoinopt9.q.out
index 15563db500..66fdeb0ce4 100644
--- ql/src/test/results/clientpositive/skewjoinopt9.q.out
+++ ql/src/test/results/clientpositive/skewjoinopt9.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n9(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n9
+PREHOOK: Output: default@t1_n9
 POSTHOOK: query: CREATE TABLE T1_n9(key STRING, val STRING)
 SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n9
+POSTHOOK: Output: default@t1_n9
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n9
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -19,11 +19,11 @@ POSTHOOK: Output: default@t1_n9
 PREHOOK: query: CREATE TABLE T2_n5(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n5
+PREHOOK: Output: default@t2_n5
 POSTHOOK: query: CREATE TABLE T2_n5(key STRING, val STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n5
+POSTHOOK: Output: default@t2_n5
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n5
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/smb_mapjoin9.q.out ql/src/test/results/clientpositive/smb_mapjoin9.q.out
index 2d4f422e10..03ceac649f 100644
--- ql/src/test/results/clientpositive/smb_mapjoin9.q.out
+++ ql/src/test/results/clientpositive/smb_mapjoin9.q.out
@@ -323,10 +323,10 @@ STAGE PLANS:
 Stage: Stage-9
 Create Table
 columns: k1 int, value string, ds string, k2 int
- name: default.smb_mapjoin9_results
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.smb_mapjoin9_results

 Stage: Stage-2
 Stats Work
diff --git ql/src/test/results/clientpositive/spark/cross_product_check_1.q.out ql/src/test/results/clientpositive/spark/cross_product_check_1.q.out
index 16d6d8f846..39fd474fd7 100644
--- ql/src/test/results/clientpositive/spark/cross_product_check_1.q.out
+++ ql/src/test/results/clientpositive/spark/cross_product_check_1.q.out
@@ -3,13 +3,13 @@ select * from src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@A_n8
+PREHOOK: Output: default@a_n8
 POSTHOOK: query: create table A_n8 as
 select * from src
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@A_n8
+POSTHOOK: Output: default@a_n8
 POSTHOOK: Lineage: a_n8.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: a_n8.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: create table B_n6 as
@@ -18,14 +18,14 @@ limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@B_n6
+PREHOOK: Output: default@b_n6
 POSTHOOK: query: create table B_n6 as
 select * from src
 limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@B_n6
+POSTHOOK: Output: default@b_n6
 POSTHOOK: Lineage: b_n6.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: b_n6.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Work 'Reducer 2' is a cross product
diff --git ql/src/test/results/clientpositive/spark/cross_product_check_2.q.out ql/src/test/results/clientpositive/spark/cross_product_check_2.q.out
index adebe73524..dc13bc78a4 100644
--- ql/src/test/results/clientpositive/spark/cross_product_check_2.q.out
+++ ql/src/test/results/clientpositive/spark/cross_product_check_2.q.out
@@ -3,13 +3,13 @@ select * from src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@A_n2
+PREHOOK: Output: default@a_n2
 POSTHOOK: query: create table A_n2 as
 select * from src
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@A_n2
+POSTHOOK: Output: default@a_n2
 POSTHOOK: Lineage: a_n2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: a_n2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: create table B_n2 as
@@ -18,14 +18,14 @@ limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@B_n2
+PREHOOK: Output: default@b_n2
 POSTHOOK: query: create table B_n2 as
 select * from src
 order by key
 limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@B_n2
+POSTHOOK: Output: default@b_n2
 POSTHOOK: Lineage: b_n2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: b_n2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-1:MAPRED' is a cross product
diff --git ql/src/test/results/clientpositive/spark/ctas.q.out ql/src/test/results/clientpositive/spark/ctas.q.out
index d6738a2b8f..eae9abcdb9 100644
--- ql/src/test/results/clientpositive/spark/ctas.q.out
+++ ql/src/test/results/clientpositive/spark/ctas.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: create table nzhang_Tmp(a int, b string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@nzhang_Tmp
+PREHOOK: Output: default@nzhang_tmp
 POSTHOOK: query: create table nzhang_Tmp(a int, b string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@nzhang_Tmp
+POSTHOOK: Output: default@nzhang_tmp
 PREHOOK: query: select * from nzhang_Tmp
 PREHOOK: type: QUERY
 PREHOOK: Input: default@nzhang_tmp
@@ -18,12 +18,12 @@ PREHOOK: query: explain create table nzhang_CTAS1 as select key k, value from sr
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@nzhang_CTAS1
+PREHOOK: Output: default@nzhang_ctas1
 POSTHOOK: query: explain create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@nzhang_CTAS1
+POSTHOOK: Output: default@nzhang_ctas1
 STAGE DEPENDENCIES:
 Stage-1 is a root stage
 Stage-0 depends on stages: Stage-1
@@ -87,7 +87,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.nzhang_CTAS1
+ name: default.nzhang_ctas1

 Stage: Stage-0
 Move Operator
@@ -98,10 +98,10 @@
 Stage: Stage-3
 Create Table
 columns: k string, value string
- name: default.nzhang_CTAS1
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.nzhang_ctas1

 Stage: Stage-2
 Stats Work
@@ -111,12 +111,12 @@ PREHOOK: query: create table nzhang_CTAS1 as select key k, value from src sort b
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
-PREHOOK: Output: default@nzhang_CTAS1
+PREHOOK: Output: default@nzhang_ctas1
 POSTHOOK: query: create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@nzhang_CTAS1
+POSTHOOK: Output: default@nzhang_ctas1
 POSTHOOK: Lineage: nzhang_ctas1.k SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: nzhang_ctas1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: select * from nzhang_CTAS1
@@ -256,10 +256,10 @@
 Stage: Stage-3
 Create Table
 columns: key string, value string
- name: default.nzhang_ctas2
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.nzhang_ctas2

 Stage: Stage-2
 Stats Work
@@ -414,10 +414,10 @@
 Stage: Stage-3
 Create Table
 columns: half_key double, conb string
- name: default.nzhang_ctas3
 input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
 serde name: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+ name: hive.default.nzhang_ctas3

 Stage: Stage-2
 Stats Work
@@ -636,11 +636,11 @@
 Stage: Stage-3
 Create Table
 columns: key string, value string
- name: default.nzhang_ctas4
 field delimiter: ,
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.nzhang_ctas4

 Stage: Stage-2
 Stats Work
@@ -796,13 +796,13 @@
 Stage: Stage-3
 Create Table
 columns: key string, value string
- name: default.nzhang_ctas5
 field delimiter: ,
 input format: org.apache.hadoop.mapred.TextInputFormat
 line delimiter: 

 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.nzhang_ctas5

 Stage: Stage-2
 Stats Work
diff --git ql/src/test/results/clientpositive/spark/filter_join_breaktask2.q.out ql/src/test/results/clientpositive/spark/filter_join_breaktask2.q.out
index eab45b9315..72dd105f81 100644
--- ql/src/test/results/clientpositive/spark/filter_join_breaktask2.q.out
+++ ql/src/test/results/clientpositive/spark/filter_join_breaktask2.q.out
@@ -2,36 +2,36 @@ PREHOOK: query: create table T1_n85(c1 string, c2 string, c3 string, c4 string,
 partitioned by (ds string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n85
+PREHOOK: Output: default@t1_n85
 POSTHOOK: query: create table T1_n85(c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string)
 partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n85
+POSTHOOK: Output: default@t1_n85
 PREHOOK: query: create table T2_n53(c1 string, c2 string, c3 string, c0 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string) partitioned by (ds string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n53
+PREHOOK: Output: default@t2_n53
 POSTHOOK: query: create table T2_n53(c1 string, c2 string, c3 string, c0 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string) partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n53
+POSTHOOK: Output: default@t2_n53
 PREHOOK: query: create table T3_n18 (c0 bigint, c1 bigint, c2 int) partitioned by (ds string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n18
+PREHOOK: Output: default@t3_n18
 POSTHOOK: query: create table T3_n18 (c0 bigint, c1 bigint, c2 int) partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n18
+POSTHOOK: Output: default@t3_n18
 PREHOOK: query: create table T4_n8 (c0 bigint, c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string, c26 string, c27 string, c28 string, c29 string, c30 string, c31 string, c32 string, c33 string, c34 string, c35 string, c36 string, c37 string, c38 string, c39 string, c40 string, c41 string, c42 string, c43 string, c44 string, c45 string, c46 string, c47 string, c48 string, c49 string, c50 string, c51 string, c52 string, c53 string, c54 string, c55 string, c56 string, c57 string, c58 string, c59 string, c60 string, c61 string, c62 string, c63 string, c64 string, c65 string, c66 string, c67 bigint, c68 string, c69 string, c70 bigint, c71 bigint, c72 bigint, c73 string, c74 string, c75 string, c76 string, c77 string, c78 string, c79 string, c80 string, c81 bigint, c82 bigint, c83 bigint) partitioned by (ds string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T4_n8
+PREHOOK: Output: default@t4_n8
 POSTHOOK: query: create table T4_n8 (c0 bigint, c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string, c26 string, c27 string, c28 string, c29 string, c30 string, c31 string, c32 string, c33 string, c34 string, c35 string, c36 string, c37 string, c38 string, c39 string, c40 string, c41 string, c42 string, c43 string, c44 string, c45 string, c46 string, c47 string, c48 string, c49 string, c50 string, c51 string, c52 string, c53 string, c54 string, c55 string, c56 string, c57 string, c58 string, c59 string, c60 string, c61 string, c62 string, c63 string, c64 string, c65 string, c66 string, c67 bigint, c68 string, c69 string, c70 bigint, c71 bigint, c72 bigint, c73 string, c74 string, c75 string, c76 string, c77 string, c78 string, c79 string, c80 string, c81 bigint, c82 bigint, c83 bigint) partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T4_n8
+POSTHOOK: Output: default@t4_n8
 PREHOOK: query: insert overwrite table T1_n85 partition (ds='2010-04-17') select '5', '1', '1', '1', 0, 0,4 from src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
diff --git ql/src/test/results/clientpositive/spark/groupby10.q.out ql/src/test/results/clientpositive/spark/groupby10.q.out
index eb1d93d9ef..f01845d9aa 100644
--- ql/src/test/results/clientpositive/spark/groupby10.q.out
+++ ql/src/test/results/clientpositive/spark/groupby10.q.out
@@ -17,11 +17,11 @@ POSTHOOK: Output: default@dest2
 PREHOOK: query: CREATE TABLE INPUT(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUT
+PREHOOK: Output: default@input
 POSTHOOK: query: CREATE TABLE INPUT(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUT
+POSTHOOK: Output: default@input
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE INPUT
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/spark/groupby7.q.out ql/src/test/results/clientpositive/spark/groupby7.q.out
index 3dd3a5e588..67b6ed1877 100644
--- ql/src/test/results/clientpositive/spark/groupby7.q.out
+++ ql/src/test/results/clientpositive/spark/groupby7.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n132(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n132
+PREHOOK: Output: default@dest1_n132
 POSTHOOK: query: CREATE TABLE DEST1_n132(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n132
+POSTHOOK: Output: default@dest1_n132
 PREHOOK: query: CREATE TABLE DEST2_n34(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n34
+PREHOOK: Output: default@dest2_n34
 POSTHOOK: query: CREATE TABLE DEST2_n34(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n34
+POSTHOOK: Output: default@dest2_n34
 PREHOOK: query: FROM SRC
 INSERT OVERWRITE TABLE DEST1_n132 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
 INSERT OVERWRITE TABLE DEST2_n34 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/spark/groupby7_map.q.out ql/src/test/results/clientpositive/spark/groupby7_map.q.out
index ea85a33850..ba39928ceb 100644
--- ql/src/test/results/clientpositive/spark/groupby7_map.q.out
+++ ql/src/test/results/clientpositive/spark/groupby7_map.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n82(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n82
+PREHOOK: Output: default@dest1_n82
 POSTHOOK: query: CREATE TABLE DEST1_n82(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n82
+POSTHOOK: Output: default@dest1_n82
 PREHOOK: query: CREATE TABLE DEST2_n19(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n19
+PREHOOK: Output: default@dest2_n19
 POSTHOOK: query: CREATE TABLE DEST2_n19(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n19
+POSTHOOK: Output: default@dest2_n19
 PREHOOK: query: EXPLAIN
 FROM SRC
 INSERT OVERWRITE TABLE DEST1_n82 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out
index d3a96c1ed9..1f57b710cf 100644
--- ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out
+++ ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out
@@ -1,19 +1,19 @@
 PREHOOK: query: CREATE TABLE DEST1_n15(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n15
+PREHOOK: Output: default@dest1_n15
 POSTHOOK: query: CREATE TABLE DEST1_n15(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n15
+POSTHOOK: Output: default@dest1_n15
 PREHOOK: query: CREATE TABLE DEST2_n3(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n3
+PREHOOK: Output: default@dest2_n3
 POSTHOOK: query: CREATE TABLE DEST2_n3(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n3
+POSTHOOK: Output: default@dest2_n3
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1_n15 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/spark/input16_cc.q.out ql/src/test/results/clientpositive/spark/input16_cc.q.out
index 195e82b301..0cf1e418a2 100644
--- ql/src/test/results/clientpositive/spark/input16_cc.q.out
+++ ql/src/test/results/clientpositive/spark/input16_cc.q.out
@@ -5,11 +5,11 @@ POSTHOOK: type: DROPTABLE
PREHOOK: query: CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val') STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUT16_CC
+PREHOOK: Output: default@input16_cc
POSTHOOK: query: CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val') STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUT16_CC
+POSTHOOK: Output: default@input16_cc
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/symlink_text_input_format.q.out ql/src/test/results/clientpositive/symlink_text_input_format.q.out
index 2fea2ca10b..f79cc3b67d 100644
--- ql/src/test/results/clientpositive/symlink_text_input_format.q.out
+++ ql/src/test/results/clientpositive/symlink_text_input_format.q.out
@@ -19,9 +19,9 @@ STAGE PLANS:
  Stage: Stage-0
      Create Table
        columns: key string, value string
-       name: default.symlink_text_input_format
        input format: org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat
        output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+       name: hive.default.symlink_text_input_format

PREHOOK: query: CREATE TABLE symlink_text_input_format (key STRING, value STRING) STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
PREHOOK: type: CREATETABLE
diff --git ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out
index 505e83c1fe..23b5b64024 100644
--- ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out
+++ ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out
@@ -17,7 +17,7 @@ row format delimited fields terminated by '|' stored as textfile
PREHOOK: type: CREATETABLE
#### A masked pattern was here ####
PREHOOK: Output: database:default
-PREHOOK: Output: default@UserVisits_web_text_none
+PREHOOK: Output: default@uservisits_web_text_none
POSTHOOK: query: CREATE TEMPORARY EXTERNAL TABLE UserVisits_web_text_none (
sourceIP string,
destURL string,
@@ -33,7 +33,7 @@ row format delimited fields terminated by '|' stored as textfile
POSTHOOK: type: CREATETABLE
#### A masked pattern was here ####
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@UserVisits_web_text_none
+POSTHOOK: Output: default@uservisits_web_text_none
PREHOOK: query: desc formatted UserVisits_web_text_none
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@uservisits_web_text_none
@@ -629,7 +629,7 @@ PREHOOK: query: CREATE TEMPORARY TABLE UserVisits_web_text_none (
row format delimited fields terminated by '|' stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:test
-PREHOOK: Output: test@UserVisits_web_text_none
+PREHOOK: Output: test@uservisits_web_text_none
POSTHOOK: query: CREATE TEMPORARY TABLE UserVisits_web_text_none (
sourceIP string,
destURL string,
@@ -643,7 +643,7 @@ POSTHOOK: query: CREATE TEMPORARY TABLE UserVisits_web_text_none (
row format delimited fields terminated by '|' stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:test
-POSTHOOK: Output: test@UserVisits_web_text_none
+POSTHOOK: Output: test@uservisits_web_text_none
PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/temp_table_insert1_overwrite_partitions.q.out ql/src/test/results/clientpositive/temp_table_insert1_overwrite_partitions.q.out
index b622f51d4c..0ee04e44e5 100644
--- ql/src/test/results/clientpositive/temp_table_insert1_overwrite_partitions.q.out
+++ ql/src/test/results/clientpositive/temp_table_insert1_overwrite_partitions.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TEMPORARY TABLE sourceTable_temp (one string,two string) PARTITIONED BY (ds string,hr string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@sourceTable_temp
+PREHOOK: Output: default@sourcetable_temp
POSTHOOK: query: CREATE TEMPORARY TABLE sourceTable_temp (one string,two string) PARTITIONED BY (ds string,hr string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@sourceTable_temp
+POSTHOOK: Output: default@sourcetable_temp
PREHOOK: query: load data local inpath '../../data/files/kv1.txt' INTO TABLE sourceTable_temp partition(ds='2011-11-11', hr='11')
PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -27,11 +27,11 @@ POSTHOOK: Output: default@sourcetable_temp@ds=2011-11-11/hr=12
PREHOOK: query: CREATE TEMPORARY TABLE destinTable_temp (one string,two string) PARTITIONED BY (ds string,hr string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@destinTable_temp
+PREHOOK: Output: default@destintable_temp
POSTHOOK: query: CREATE TEMPORARY TABLE destinTable_temp (one string,two string) PARTITIONED BY (ds string,hr string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@destinTable_temp
+POSTHOOK: Output: default@destintable_temp
PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE destinTable_temp PARTITION (ds='2011-11-11', hr='11') if not exists
SELECT one,two FROM sourceTable_temp WHERE ds='2011-11-11' AND hr='11' order by one desc, two desc limit 5
PREHOOK: type: QUERY
@@ -225,11 +225,11 @@ POSTHOOK: Output: default@destintable_temp
PREHOOK: query: CREATE TEMPORARY TABLE destinTable_temp (one string,two string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@destinTable_temp
+PREHOOK: Output: default@destintable_temp
POSTHOOK: query: CREATE TEMPORARY TABLE destinTable_temp (one string,two string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@destinTable_temp
+POSTHOOK: Output: default@destintable_temp
PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE destinTable_temp SELECT one,two FROM sourceTable_temp WHERE ds='2011-11-11' AND hr='11' order by one desc, two desc limit 5
PREHOOK: type: QUERY
PREHOOK: Input: default@sourcetable_temp
@@ -326,11 +326,11 @@ POSTHOOK: Output: default@sourcetable_temp
PREHOOK: query: CREATE TEMPORARY TABLE sourceTable_temp (one string,two string) PARTITIONED BY (ds string,hr string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@sourceTable_temp
+PREHOOK: Output: default@sourcetable_temp
POSTHOOK: query: CREATE TEMPORARY TABLE sourceTable_temp (one string,two string) PARTITIONED BY (ds string,hr string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@sourceTable_temp
+POSTHOOK: Output: default@sourcetable_temp
PREHOOK: query: load data local inpath '../../data/files/kv1.txt' INTO TABLE sourceTable_temp partition(ds='2011-11-11', hr='11')
PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -343,11 +343,11 @@ POSTHOOK: Output: default@sourcetable_temp@ds=2011-11-11/hr=11
PREHOOK: query: CREATE TEMPORARY TABLE destinTable_temp (one string,two string) PARTITIONED BY (ds string,hr string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@destinTable_temp
+PREHOOK: Output: default@destintable_temp
POSTHOOK: query: CREATE TEMPORARY TABLE destinTable_temp (one string,two string) PARTITIONED BY (ds string,hr string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@destinTable_temp
+POSTHOOK: Output: default@destintable_temp
PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE destinTable_temp PARTITION (ds='2011-11-11', hr='11') if not exists
SELECT one,two FROM sourceTable_temp WHERE ds='2011-11-11' AND hr='11' order by one desc, two desc limit 5
PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/temp_table_insert2_overwrite_partitions.q.out ql/src/test/results/clientpositive/temp_table_insert2_overwrite_partitions.q.out
index a5314e944c..2effc1fded 100644
--- ql/src/test/results/clientpositive/temp_table_insert2_overwrite_partitions.q.out
+++ ql/src/test/results/clientpositive/temp_table_insert2_overwrite_partitions.q.out
@@ -13,11 +13,11 @@ POSTHOOK: Output: database:db2
PREHOOK: query: CREATE TEMPORARY TABLE db1.sourceTable_temp (one string,two string) PARTITIONED BY (ds string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:db1
-PREHOOK: Output: db1@sourceTable_temp
+PREHOOK: Output: db1@sourcetable_temp
POSTHOOK: query: CREATE TEMPORARY TABLE db1.sourceTable_temp (one string,two string) PARTITIONED BY (ds string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:db1
-POSTHOOK: Output: db1@sourceTable_temp
+POSTHOOK: Output: db1@sourcetable_temp
PREHOOK: query: load data local inpath '../../data/files/kv1.txt' INTO TABLE db1.sourceTable_temp partition(ds='2011-11-11')
PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -38,11 +38,11 @@ POSTHOOK: Output: db1@sourcetable_temp@ds=2011-11-11
PREHOOK: query: CREATE TEMPORARY TABLE db2.destinTable_temp (one string,two string) PARTITIONED BY (ds string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:db2
-PREHOOK: Output: db2@destinTable_temp
+PREHOOK: Output: db2@destintable_temp
POSTHOOK: query: CREATE TEMPORARY TABLE db2.destinTable_temp (one string,two string) PARTITIONED BY (ds string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:db2
-POSTHOOK: Output: db2@destinTable_temp
+POSTHOOK: Output: db2@destintable_temp
PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE db2.destinTable_temp PARTITION (ds='2011-11-11')
SELECT one,two FROM db1.sourceTable_temp WHERE ds='2011-11-11' order by one desc, two desc limit 5
PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/temp_table_partition_ctas.q.out ql/src/test/results/clientpositive/temp_table_partition_ctas.q.out
index bd3574f03f..7098832250 100644
--- ql/src/test/results/clientpositive/temp_table_partition_ctas.q.out
+++ ql/src/test/results/clientpositive/temp_table_partition_ctas.q.out
@@ -59,12 +59,12 @@ STAGE PLANS:
  Stage: Stage-3
      Create Table
        columns: value string
-       name: default.partition_ctas_1_temp
        input format: org.apache.hadoop.mapred.TextInputFormat
#### A masked pattern was here ####
        output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
        partition columns: key string
        serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+       name: hive.default.partition_ctas_1_temp
        isTemporary: true

  Stage: Stage-0
diff --git ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
index dad999ab1a..7e9e714365 100644
--- ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
+++ ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
@@ -152,11 +152,11 @@ POSTHOOK: Input: database:newdb
PREHOOK: query: create table tab_n2 (name string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:newdb
-PREHOOK: Output: newDB@tab_n2
+PREHOOK: Output: newdb@tab_n2
POSTHOOK: query: create table tab_n2 (name string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:newdb
-POSTHOOK: Output: newDB@tab_n2
+POSTHOOK: Output: newdb@tab_n2
PREHOOK: query: alter table tab_n2 rename to newName
PREHOOK: type: ALTERTABLE_RENAME
PREHOOK: Input: newdb@tab_n2
@@ -174,7 +174,7 @@ POSTHOOK: type: ALTERTABLE_RENAME
POSTHOOK: Input: newdb@tab_n2
POSTHOOK: Output: newdb@tab_n2
Stage-0
-  Rename Table{"table name:":"newDB.tab_n2","new table name:":"newDB.newName"}
+  Rename Table{"table name:":"hive.newdb.tab_n2","new table name:":"hive.newdb.newname"}

PREHOOK: query: drop table tab_n2
PREHOOK: type: DROPTABLE
@@ -406,7 +406,7 @@ Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
Stage-3
  Stats Work{}
    Stage-4
-     Create Table{"name:":"default.src_autho_test_n4"}
+     Create Table{"name:":"hive.default.src_autho_test_n4"}
      Stage-0
        Move Operator
          Stage-1
@@ -672,7 +672,7 @@ POSTHOOK: Output: default@v_n5
Plan optimized by CBO.
Stage-1
-  Create View{"name:":"default.v_n5","original text:":"with cte as (select * from src order by key limit 5)\nselect * from cte"}
+  Create View{"name:":"hive.default.v_n5","original text:":"with cte as (select * from src order by key limit 5)\nselect * from cte"}

PREHOOK: query: with cte as (select * from src order by key limit 5)
select * from cte
diff --git ql/src/test/results/clientpositive/tez/explainuser_3.q.out ql/src/test/results/clientpositive/tez/explainuser_3.q.out
index 5218c90f0c..9c2aa3d91a 100644
--- ql/src/test/results/clientpositive/tez/explainuser_3.q.out
+++ ql/src/test/results/clientpositive/tez/explainuser_3.q.out
@@ -164,11 +164,11 @@ POSTHOOK: Input: database:newdb
PREHOOK: query: create table tab_n1 (name string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:newdb
-PREHOOK: Output: newDB@tab_n1
+PREHOOK: Output: newdb@tab_n1
POSTHOOK: query: create table tab_n1 (name string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:newdb
-POSTHOOK: Output: newDB@tab_n1
+POSTHOOK: Output: newdb@tab_n1
PREHOOK: query: explain alter table tab_n1 rename to newName
PREHOOK: type: ALTERTABLE_RENAME
PREHOOK: Input: newdb@tab_n1
@@ -178,7 +178,7 @@ POSTHOOK: type: ALTERTABLE_RENAME
POSTHOOK: Input: newdb@tab_n1
POSTHOOK: Output: newdb@tab_n1
Stage-0
-  Rename Table{"table name:":"newDB.tab_n1","new table name:":"newDB.newName"}
+  Rename Table{"table name:":"hive.newdb.tab_n1","new table name:":"hive.newdb.newname"}

PREHOOK: query: explain drop table tab_n1
PREHOOK: type: DROPTABLE
@@ -334,7 +334,7 @@ Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
Stage-3
  Stats Work{}
    Stage-4
-     Create Table{"name:":"default.src_autho_test_n3"}
+     Create Table{"name:":"hive.default.src_autho_test_n3"}
      Stage-0
        Move Operator
          Stage-1
@@ -538,7 +538,7 @@ POSTHOOK: Output: default@v_n1
Plan optimized by CBO.
Stage-1
-  Create View{"name:":"default.v_n1","original text:":"with cte as (select * from src order by key limit 5)\nselect * from cte"}
+  Create View{"name:":"hive.default.v_n1","original text:":"with cte as (select * from src order by key limit 5)\nselect * from cte"}

PREHOOK: query: explain with cte as (select * from src order by key limit 5)
select * from cte
diff --git ql/src/test/results/clientpositive/tez/tez_union_udtf.q.out ql/src/test/results/clientpositive/tez/tez_union_udtf.q.out
index cfe4481326..1f9afc4f6a 100644
--- ql/src/test/results/clientpositive/tez/tez_union_udtf.q.out
+++ ql/src/test/results/clientpositive/tez/tez_union_udtf.q.out
@@ -32,7 +32,7 @@ Reducer 3 <- Union 2 (CUSTOM_SIMPLE_EDGE)
Stage-3
  Stats Work{}
    Stage-9
-     Create Table{"name:":"default.x"}
+     Create Table{"name:":"hive.default.x"}
      Stage-0
        Move Operator
          Stage-5(CONDITIONAL)
diff --git ql/src/test/results/clientpositive/type_change_test_fraction_vectorized.q.out ql/src/test/results/clientpositive/type_change_test_fraction_vectorized.q.out
index 955d268fa5..9923a4deda 100644
--- ql/src/test/results/clientpositive/type_change_test_fraction_vectorized.q.out
+++ ql/src/test/results/clientpositive/type_change_test_fraction_vectorized.q.out
@@ -16,7 +16,7 @@ PREHOOK: query: create table testAltCol_n3
cNumeric3_2 NUMERIC(3,2))
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltCol_n3
+PREHOOK: Output: default@testaltcol_n3
POSTHOOK: query: create table testAltCol_n3
(cId TINYINT,
cFloat FLOAT,
@@ -31,7 +31,7 @@ POSTHOOK: query: create table testAltCol_n3
cNumeric3_2 NUMERIC(3,2))
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltCol_n3
+POSTHOOK: Output: default@testaltcol_n3
PREHOOK: query: insert into testAltCol_n3 values
(1,
1.234e5,
@@ -558,12 +558,12 @@ PREHOOK: query: create table testAltColT_n3 stored as textfile as select * from
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n3
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColT_n3
+PREHOOK: Output: default@testaltcolt_n3
POSTHOOK: query: create table testAltColT_n3 stored as textfile as select * from testAltCol_n3
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n3
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColT_n3
+POSTHOOK: Output: default@testaltcolt_n3
POSTHOOK: Lineage: testaltcolt_n3.cdecimal16_8 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal16_8, type:decimal(16,8), comment:null), ]
POSTHOOK: Lineage: testaltcolt_n3.cdecimal38_18 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal38_18, type:decimal(38,18), comment:null), ]
POSTHOOK: Lineage: testaltcolt_n3.cdecimal38_37 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal38_37, type:decimal(38,37), comment:null), ]
@@ -2286,12 +2286,12 @@ PREHOOK: query: create table testAltColSF_n3 stored as sequencefile as select *
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n3
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColSF_n3
+PREHOOK: Output: default@testaltcolsf_n3
POSTHOOK: query: create table testAltColSF_n3 stored as sequencefile as select * from testAltCol_n3
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n3
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColSF_n3
+POSTHOOK: Output: default@testaltcolsf_n3
POSTHOOK: Lineage: testaltcolsf_n3.cdecimal16_8 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal16_8, type:decimal(16,8), comment:null), ]
POSTHOOK: Lineage: testaltcolsf_n3.cdecimal38_18 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal38_18, type:decimal(38,18), comment:null), ]
POSTHOOK: Lineage: testaltcolsf_n3.cdecimal38_37 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal38_37, type:decimal(38,37), comment:null), ]
@@ -4014,12 +4014,12 @@ PREHOOK: query: create table testAltColRCF_n3 stored as rcfile as select * from
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n3
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColRCF_n3
+PREHOOK: Output: default@testaltcolrcf_n3
POSTHOOK: query: create table testAltColRCF_n3 stored as rcfile as select * from testAltCol_n3
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n3
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColRCF_n3
+POSTHOOK: Output: default@testaltcolrcf_n3
POSTHOOK: Lineage: testaltcolrcf_n3.cdecimal16_8 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal16_8, type:decimal(16,8), comment:null), ]
POSTHOOK: Lineage: testaltcolrcf_n3.cdecimal38_18 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal38_18, type:decimal(38,18), comment:null), ]
POSTHOOK: Lineage: testaltcolrcf_n3.cdecimal38_37 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal38_37, type:decimal(38,37), comment:null), ]
@@ -5742,12 +5742,12 @@ PREHOOK: query: create table testAltColORC_n3 stored as orc as select * from tes
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n3
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColORC_n3
+PREHOOK: Output: default@testaltcolorc_n3
POSTHOOK: query: create table testAltColORC_n3 stored as orc as select * from testAltCol_n3
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n3
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColORC_n3
+POSTHOOK: Output: default@testaltcolorc_n3
POSTHOOK: Lineage: testaltcolorc_n3.cdecimal16_8 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal16_8, type:decimal(16,8), comment:null), ]
POSTHOOK: Lineage: testaltcolorc_n3.cdecimal38_18 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal38_18, type:decimal(38,18), comment:null), ]
POSTHOOK: Lineage: testaltcolorc_n3.cdecimal38_37 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal38_37, type:decimal(38,37), comment:null), ]
@@ -7010,12 +7010,12 @@ PREHOOK: query: create table testAltColPDE_n3 stored as parquet as select * from
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n3
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColPDE_n3
+PREHOOK: Output: default@testaltcolpde_n3
POSTHOOK: query: create table testAltColPDE_n3 stored as parquet as select * from testAltCol_n3
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n3
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColPDE_n3
+POSTHOOK: Output: default@testaltcolpde_n3
POSTHOOK: Lineage: testaltcolpde_n3.cdecimal16_8 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal16_8, type:decimal(16,8), comment:null), ]
POSTHOOK: Lineage: testaltcolpde_n3.cdecimal38_18 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal38_18, type:decimal(38,18), comment:null), ]
POSTHOOK: Lineage: testaltcolpde_n3.cdecimal38_37 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal38_37, type:decimal(38,37), comment:null), ]
@@ -8739,13 +8739,13 @@ select * from testAltCol_n3
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n3
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColPDD_n3
+PREHOOK: Output: default@testaltcolpdd_n3
POSTHOOK: query: create table testAltColPDD_n3 stored as parquet tblproperties ("parquet.enable.dictionary"="false") as
select * from testAltCol_n3
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n3
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColPDD_n3
+POSTHOOK: Output: default@testaltcolpdd_n3
POSTHOOK: Lineage: testaltcolpdd_n3.cdecimal16_8 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal16_8, type:decimal(16,8), comment:null), ]
POSTHOOK: Lineage: testaltcolpdd_n3.cdecimal38_18 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal38_18, type:decimal(38,18), comment:null), ]
POSTHOOK: Lineage: testaltcolpdd_n3.cdecimal38_37 SIMPLE [(testaltcol_n3)testaltcol_n3.FieldSchema(name:cdecimal38_37, type:decimal(38,37), comment:null), ]
diff --git ql/src/test/results/clientpositive/type_change_test_int_vectorized.q.out ql/src/test/results/clientpositive/type_change_test_int_vectorized.q.out
index 87a5b3dd7f..0934ae4b5e 100644
--- ql/src/test/results/clientpositive/type_change_test_int_vectorized.q.out
+++ ql/src/test/results/clientpositive/type_change_test_int_vectorized.q.out
@@ -10,7 +10,7 @@ PREHOOK: query: create table testAltCol
cTinyint TINYINT)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltCol
+PREHOOK: Output: default@testaltcol
POSTHOOK: query: create table testAltCol
(cId TINYINT,
cBigInt BIGINT,
@@ -19,7 +19,7 @@ POSTHOOK: query: create table testAltCol
cTinyint TINYINT)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltCol
+POSTHOOK: Output: default@testaltcol
PREHOOK: query: insert into testAltCol values
(1,
1234567890123456789,
@@ -132,12 +132,12 @@ PREHOOK: query: create table testAltColT stored as textfile as select * from tes
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColT
+PREHOOK: Output: default@testaltcolt
POSTHOOK: query: create table testAltColT stored as textfile as select * from testAltCol
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColT
+POSTHOOK: Output: default@testaltcolt
POSTHOOK: Lineage: testaltcolt.cbigint SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolt.cid SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cid, type:tinyint, comment:null), ]
POSTHOOK: Lineage: testaltcolt.cint SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cint, type:int, comment:null), ]
@@ -501,12 +501,12 @@ PREHOOK: query: create table testAltColSF stored as sequencefile as select * fro
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColSF
+PREHOOK: Output: default@testaltcolsf
POSTHOOK: query: create table testAltColSF stored as sequencefile as select * from testAltCol
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColSF
+POSTHOOK: Output: default@testaltcolsf
POSTHOOK: Lineage: testaltcolsf.cbigint SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolsf.cid SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cid, type:tinyint, comment:null), ]
POSTHOOK: Lineage: testaltcolsf.cint SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cint, type:int, comment:null), ]
@@ -870,12 +870,12 @@ PREHOOK: query: create table testAltColRCF stored as rcfile as select * from tes
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColRCF
+PREHOOK: Output: default@testaltcolrcf
POSTHOOK: query: create table testAltColRCF stored as rcfile as select * from testAltCol
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColRCF
+POSTHOOK: Output: default@testaltcolrcf
POSTHOOK: Lineage: testaltcolrcf.cbigint SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolrcf.cid SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cid, type:tinyint, comment:null), ]
POSTHOOK: Lineage: testaltcolrcf.cint SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cint, type:int, comment:null), ]
@@ -1239,12 +1239,12 @@ PREHOOK: query: create table testAltColORC stored as orc as select * from testAl
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColORC
+PREHOOK: Output: default@testaltcolorc
POSTHOOK: query: create table testAltColORC stored as orc as select * from testAltCol
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColORC
+POSTHOOK: Output: default@testaltcolorc
POSTHOOK: Lineage: testaltcolorc.cbigint SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolorc.cid SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cid, type:tinyint, comment:null), ]
POSTHOOK: Lineage: testaltcolorc.cint SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cint, type:int, comment:null), ]
@@ -1608,12 +1608,12 @@ PREHOOK: query: create table testAltColPDE stored as parquet as select * from te
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColPDE
+PREHOOK: Output: default@testaltcolpde
POSTHOOK: query: create table testAltColPDE stored as parquet as select * from testAltCol
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColPDE
+POSTHOOK: Output: default@testaltcolpde
POSTHOOK: Lineage: testaltcolpde.cbigint SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolpde.cid SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cid, type:tinyint, comment:null), ]
POSTHOOK: Lineage: testaltcolpde.cint SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cint, type:int, comment:null), ]
@@ -1978,13 +1978,13 @@ select * from testAltCol
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColPDD
+PREHOOK: Output: default@testaltcolpdd
POSTHOOK: query: create table testAltColPDD stored as parquet tblproperties ("parquet.enable.dictionary"="false") as
select * from testAltCol
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColPDD
+POSTHOOK: Output: default@testaltcolpdd
POSTHOOK: Lineage: testaltcolpdd.cbigint SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolpdd.cid SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cid, type:tinyint, comment:null), ]
POSTHOOK: Lineage: testaltcolpdd.cint SIMPLE [(testaltcol)testaltcol.FieldSchema(name:cint, type:int, comment:null), ]
diff --git ql/src/test/results/clientpositive/typechangetest.q.out ql/src/test/results/clientpositive/typechangetest.q.out
index 4a617d70b4..73f028cd77 100644
--- ql/src/test/results/clientpositive/typechangetest.q.out
+++ ql/src/test/results/clientpositive/typechangetest.q.out
@@ -15,7 +15,7 @@ PREHOOK: query: create table testAltCol_n0
cBoolean BOOLEAN)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltCol_n0
+PREHOOK: Output: default@testaltcol_n0
POSTHOOK: query: create table testAltCol_n0
(cId TINYINT,
cTimeStamp TIMESTAMP,
@@ -29,7 +29,7 @@ POSTHOOK: query: create table testAltCol_n0
cBoolean BOOLEAN)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltCol_n0
+POSTHOOK: Output: default@testaltcol_n0
PREHOOK: query: insert into testAltCol_n0 values
(1,
'2017-11-07 09:02:49.999999999',
@@ -238,12 +238,12 @@ PREHOOK: query: create table testAltColT_n0 stored as textfile as select * from
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n0
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColT_n0
+PREHOOK: Output: default@testaltcolt_n0
POSTHOOK: query: create table testAltColT_n0 stored as textfile as select * from testAltCol_n0
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n0
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColT_n0
+POSTHOOK: Output: default@testaltcolt_n0
POSTHOOK: Lineage: testaltcolt_n0.cbigint SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolt_n0.cboolean SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cboolean, type:boolean, comment:null), ]
POSTHOOK: Lineage: testaltcolt_n0.cdecimal SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cdecimal, type:decimal(38,18), comment:null), ]
@@ -698,12 +698,12 @@ PREHOOK: query: create table testAltColSF_n0 stored as sequencefile as select *
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n0
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColSF_n0
+PREHOOK: Output: default@testaltcolsf_n0
POSTHOOK: query: create table testAltColSF_n0 stored as sequencefile as select * from testAltCol_n0
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n0
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColSF_n0
+POSTHOOK: Output: default@testaltcolsf_n0
POSTHOOK: Lineage: testaltcolsf_n0.cbigint SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolsf_n0.cboolean SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cboolean, type:boolean, comment:null), ]
POSTHOOK: Lineage: testaltcolsf_n0.cdecimal SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cdecimal, type:decimal(38,18), comment:null), ]
@@ -1158,12 +1158,12 @@ PREHOOK: query: create table testAltColORC_n0 stored as orc as select * from tes
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n0
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColORC_n0
+PREHOOK: Output: default@testaltcolorc_n0
POSTHOOK: query: create table testAltColORC_n0 stored as orc as select * from testAltCol_n0
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n0
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColORC_n0
+POSTHOOK: Output: default@testaltcolorc_n0
POSTHOOK: Lineage: testaltcolorc_n0.cbigint SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolorc_n0.cboolean SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cboolean, type:boolean, comment:null), ]
POSTHOOK: Lineage: testaltcolorc_n0.cdecimal SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cdecimal, type:decimal(38,18), comment:null), ]
@@ -1618,12 +1618,12 @@ PREHOOK: query: create table testAltColRCF_n0 stored as rcfile as select * from
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n0
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColRCF_n0
+PREHOOK: Output: default@testaltcolrcf_n0
POSTHOOK: query: create table testAltColRCF_n0 stored as rcfile as select * from testAltCol_n0
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n0
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColRCF_n0
+POSTHOOK: Output: default@testaltcolrcf_n0
POSTHOOK: Lineage: testaltcolrcf_n0.cbigint SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolrcf_n0.cboolean SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cboolean, type:boolean, comment:null), ]
POSTHOOK: Lineage: testaltcolrcf_n0.cdecimal SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cdecimal, type:decimal(38,18), comment:null), ]
@@ -2078,12 +2078,12 @@ PREHOOK: query: create table testAltColP stored as parquet as select * from test
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@testaltcol_n0
PREHOOK: Output: database:default
-PREHOOK: Output: default@testAltColP
+PREHOOK: Output: default@testaltcolp
POSTHOOK: query: create table testAltColP stored as parquet as select * from testAltCol_n0
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@testaltcol_n0
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAltColP
+POSTHOOK: Output: default@testaltcolp
POSTHOOK: Lineage: testaltcolp.cbigint SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cbigint, type:bigint, comment:null), ]
POSTHOOK: Lineage: testaltcolp.cboolean SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cboolean, type:boolean, comment:null), ]
POSTHOOK: Lineage: testaltcolp.cdecimal SIMPLE [(testaltcol_n0)testaltcol_n0.FieldSchema(name:cdecimal, type:decimal(38,18), comment:null), ]
diff --git ql/src/test/results/clientpositive/udf_like.q.out ql/src/test/results/clientpositive/udf_like.q.out
index 8a0bb986eb..1c1bd01dc0 100644
--- ql/src/test/results/clientpositive/udf_like.q.out
+++ ql/src/test/results/clientpositive/udf_like.q.out
@@ -87,11 +87,11 @@ true true false true false
PREHOOK: query: CREATE TEMPORARY TABLE SplitLines(`id` string) STORED AS ORC
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@SplitLines
+PREHOOK: Output: default@splitlines
POSTHOOK: query: CREATE TEMPORARY TABLE SplitLines(`id` string) STORED AS ORC
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@SplitLines
+POSTHOOK: Output: default@splitlines
PREHOOK: query: INSERT INTO SplitLines SELECT 'withdraw\ncash'
PREHOOK: type: QUERY
PREHOOK: Input: _dummy_database@_dummy_table
diff --git ql/src/test/results/clientpositive/union17.q.out ql/src/test/results/clientpositive/union17.q.out
index 45b0862965..0c5440b48d 100644
--- ql/src/test/results/clientpositive/union17.q.out
+++ ql/src/test/results/clientpositive/union17.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n78(key STRING, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n78
+PREHOOK: Output: default@dest1_n78
POSTHOOK: query: CREATE TABLE DEST1_n78(key STRING, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n78
+POSTHOOK: Output: default@dest1_n78
PREHOOK: query: CREATE TABLE DEST2_n17(key STRING, val1 STRING, val2 STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n17
+PREHOOK: Output: default@dest2_n17
POSTHOOK: query: CREATE TABLE DEST2_n17(key STRING, val1 STRING, val2 STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n17
+POSTHOOK: Output: default@dest2_n17
PREHOOK: query: explain
FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
UNION ALL
diff --git ql/src/test/results/clientpositive/union18.q.out ql/src/test/results/clientpositive/union18.q.out
index 841d40fffe..69edbe4122 100644
--- ql/src/test/results/clientpositive/union18.q.out
+++ ql/src/test/results/clientpositive/union18.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n128(key STRING, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n128
+PREHOOK: Output: default@dest1_n128
POSTHOOK: query: CREATE TABLE DEST1_n128(key STRING, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n128
+POSTHOOK: Output: default@dest1_n128
PREHOOK: query: CREATE TABLE DEST2_n33(key STRING, val1 STRING, val2 STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n33
+PREHOOK: Output: default@dest2_n33
POSTHOOK: query: CREATE TABLE DEST2_n33(key STRING, val1 STRING, val2 STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n33
+POSTHOOK: Output: default@dest2_n33
PREHOOK: query: explain
FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
UNION ALL
diff --git ql/src/test/results/clientpositive/union19.q.out ql/src/test/results/clientpositive/union19.q.out
index 1907b9024e..e72bb253e6 100644
--- ql/src/test/results/clientpositive/union19.q.out
+++ ql/src/test/results/clientpositive/union19.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n86(key STRING, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n86
+PREHOOK: Output: default@dest1_n86
POSTHOOK: query: CREATE TABLE DEST1_n86(key STRING, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n86
+POSTHOOK: Output: default@dest1_n86
PREHOOK: query: CREATE TABLE DEST2_n21(key STRING, val1 STRING, val2 STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n21
+PREHOOK: Output: default@dest2_n21
POSTHOOK: query: CREATE TABLE DEST2_n21(key STRING, val1 STRING, val2 STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n21
+POSTHOOK: Output: default@dest2_n21
PREHOOK: query: explain
FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
UNION ALL
diff --git ql/src/test/results/clientpositive/union25.q.out ql/src/test/results/clientpositive/union25.q.out
index 82b0b01bb0..442460990d 100644
--- ql/src/test/results/clientpositive/union25.q.out
+++ ql/src/test/results/clientpositive/union25.q.out
@@ -230,10 +230,10 @@ STAGE PLANS:
  Stage: Stage-7
      Create Table
        columns: counts bigint, key string, value string
-       name: default.tmp_unionall
        input format: org.apache.hadoop.mapred.TextInputFormat
        output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
        serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+       name: hive.default.tmp_unionall

  Stage: Stage-3
    Stats Work
diff --git ql/src/test/results/clientpositive/union_remove_1.q.out ql/src/test/results/clientpositive/union_remove_1.q.out
index dda4674044..cee140c9bf 100644
--- ql/src/test/results/clientpositive/union_remove_1.q.out
+++ ql/src/test/results/clientpositive/union_remove_1.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1
+PREHOOK: Output: default@inputtbl1
POSTHOOK: query: create table inputTbl1(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1
+POSTHOOK: Output: default@inputtbl1
PREHOOK: query: create table outputTbl1(key string, `values` bigint) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1
+PREHOOK: Output: default@outputtbl1
POSTHOOK: query: create table outputTbl1(key string, `values` bigint) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1
+POSTHOOK: Output: default@outputtbl1
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/union_remove_10.q.out ql/src/test/results/clientpositive/union_remove_10.q.out
index 6002274420..1d58a20efc 100644
--- ql/src/test/results/clientpositive/union_remove_10.q.out
+++ ql/src/test/results/clientpositive/union_remove_10.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1_n7(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n7
+PREHOOK: Output: default@inputtbl1_n7
POSTHOOK: query: create table inputTbl1_n7(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n7
+POSTHOOK: Output: default@inputtbl1_n7
PREHOOK: query: create table outputTbl1_n9(key string, `values` bigint) stored as rcfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n9
+PREHOOK: Output: default@outputtbl1_n9
POSTHOOK: query: create table outputTbl1_n9(key string, `values` bigint) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n9
+POSTHOOK: Output: default@outputtbl1_n9
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n7
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/union_remove_11.q.out ql/src/test/results/clientpositive/union_remove_11.q.out
index a66e963a45..1bfa5dcee6 100644
--- ql/src/test/results/clientpositive/union_remove_11.q.out
+++ ql/src/test/results/clientpositive/union_remove_11.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1_n14(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n14
+PREHOOK: Output: default@inputtbl1_n14
POSTHOOK: query: create table inputTbl1_n14(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n14
+POSTHOOK: Output: default@inputtbl1_n14
PREHOOK: query: create table outputTbl1_n21(key string, `values` bigint) stored as rcfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n21
+PREHOOK: Output: default@outputtbl1_n21
POSTHOOK: query: create table outputTbl1_n21(key string, `values` bigint) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n21
+POSTHOOK: Output: default@outputtbl1_n21
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n14
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/union_remove_12.q.out ql/src/test/results/clientpositive/union_remove_12.q.out
index 548a514bf1..e01ddfd9b2 100644
--- ql/src/test/results/clientpositive/union_remove_12.q.out
+++ ql/src/test/results/clientpositive/union_remove_12.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1_n21(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n21
+PREHOOK: Output: default@inputtbl1_n21
POSTHOOK: query: create table inputTbl1_n21(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n21
+POSTHOOK: Output: default@inputtbl1_n21
PREHOOK: query: create table outputTbl1_n29(key string, `values` bigint) stored as rcfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n29
+PREHOOK: Output: default@outputtbl1_n29
POSTHOOK: query: create table outputTbl1_n29(key string, `values` bigint) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n29
+POSTHOOK: Output: default@outputtbl1_n29
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n21
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/union_remove_13.q.out ql/src/test/results/clientpositive/union_remove_13.q.out
index 797dcd4749..3ae2aaf199 100644
--- ql/src/test/results/clientpositive/union_remove_13.q.out
+++ ql/src/test/results/clientpositive/union_remove_13.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1_n2(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n2
+PREHOOK: Output: default@inputtbl1_n2
POSTHOOK: query: create table inputTbl1_n2(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n2
+POSTHOOK: Output: default@inputtbl1_n2
PREHOOK: query: create table outputTbl1_n3(key string, `values` bigint) stored as rcfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n3
+PREHOOK: Output: default@outputtbl1_n3
POSTHOOK: query: create table outputTbl1_n3(key string, `values` bigint) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n3
+POSTHOOK: Output: default@outputtbl1_n3
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n2
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/union_remove_14.q.out ql/src/test/results/clientpositive/union_remove_14.q.out
index 528a2b2fc4..cafd116189 100644
--- ql/src/test/results/clientpositive/union_remove_14.q.out
+++ ql/src/test/results/clientpositive/union_remove_14.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1_n11(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n11
+PREHOOK: Output: default@inputtbl1_n11
POSTHOOK: query: create table inputTbl1_n11(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n11
+POSTHOOK: Output: default@inputtbl1_n11
PREHOOK: query: create table outputTbl1_n16(key string, `values` bigint) stored as rcfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n16
+PREHOOK: Output: default@outputtbl1_n16
POSTHOOK: query: create table outputTbl1_n16(key string, `values` bigint) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n16
+POSTHOOK: Output: default@outputtbl1_n16
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n11
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/union_remove_15.q.out ql/src/test/results/clientpositive/union_remove_15.q.out
index af476e9805..8cd570cac3 100644
--- ql/src/test/results/clientpositive/union_remove_15.q.out
+++ ql/src/test/results/clientpositive/union_remove_15.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1_n18(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n18
+PREHOOK: Output: default@inputtbl1_n18
POSTHOOK: query: create table inputTbl1_n18(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n18
+POSTHOOK: Output: default@inputtbl1_n18
PREHOOK: query: create table outputTbl1_n25(key string, `values` bigint) partitioned by (ds string) stored as rcfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n25
+PREHOOK: Output: default@outputtbl1_n25
POSTHOOK: query: create table outputTbl1_n25(key string, `values` bigint) partitioned by (ds string) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n25
+POSTHOOK: Output: default@outputtbl1_n25
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n18
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/union_remove_16.q.out ql/src/test/results/clientpositive/union_remove_16.q.out
index 337d90b9ab..043bdbe313 100644
--- ql/src/test/results/clientpositive/union_remove_16.q.out
+++ ql/src/test/results/clientpositive/union_remove_16.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1_n23(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n23
+PREHOOK: Output: default@inputtbl1_n23
POSTHOOK: query: create table inputTbl1_n23(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n23
+POSTHOOK: Output: default@inputtbl1_n23
PREHOOK: query: create table outputTbl1_n32(key string, `values` bigint) partitioned by (ds string) stored as rcfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n32
+PREHOOK: Output: default@outputtbl1_n32
POSTHOOK: query: create table outputTbl1_n32(key string, `values` bigint) partitioned by (ds string) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n32
+POSTHOOK: Output: default@outputtbl1_n32
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n23
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/union_remove_17.q.out ql/src/test/results/clientpositive/union_remove_17.q.out
index 43f0662ffa..87799b6c33 100644
--- ql/src/test/results/clientpositive/union_remove_17.q.out
+++ ql/src/test/results/clientpositive/union_remove_17.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1_n3(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n3
+PREHOOK: Output: default@inputtbl1_n3
POSTHOOK: query: create table inputTbl1_n3(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n3
+POSTHOOK: Output: default@inputtbl1_n3
PREHOOK: query: create table outputTbl1_n4(key string, `values` bigint) partitioned by (ds string) stored as rcfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n4
+PREHOOK: Output: default@outputtbl1_n4
POSTHOOK: query: create table outputTbl1_n4(key string, `values` bigint) partitioned by (ds string) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n4
+POSTHOOK: Output: default@outputtbl1_n4
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n3
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/union_remove_18.q.out ql/src/test/results/clientpositive/union_remove_18.q.out
index 9894a437db..1adc5da225 100644
--- ql/src/test/results/clientpositive/union_remove_18.q.out
+++ ql/src/test/results/clientpositive/union_remove_18.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1_n22(key string, ds string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n22
+PREHOOK: Output: default@inputtbl1_n22
POSTHOOK: query: create table inputTbl1_n22(key string, ds string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n22
+POSTHOOK: Output: default@inputtbl1_n22
PREHOOK: query: create table outputTbl1_n30(key string, `values` bigint) partitioned by (ds string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n30
+PREHOOK: Output: default@outputtbl1_n30
POSTHOOK: query: create table outputTbl1_n30(key string, `values` bigint) partitioned by (ds string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n30
+POSTHOOK: Output: default@outputtbl1_n30
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n22
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/union_remove_19.q.out ql/src/test/results/clientpositive/union_remove_19.q.out
index 95819f64d7..e0f73c8375 100644
--- ql/src/test/results/clientpositive/union_remove_19.q.out
+++ ql/src/test/results/clientpositive/union_remove_19.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1_n1(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n1
+PREHOOK: Output: default@inputtbl1_n1
POSTHOOK: query: create table inputTbl1_n1(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n1
+POSTHOOK: Output: default@inputtbl1_n1
PREHOOK: query: create table outputTbl1_n1(key string, `values` bigint) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n1
+PREHOOK: Output: default@outputtbl1_n1
POSTHOOK: query: create table outputTbl1_n1(key string, `values` bigint) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n1
+POSTHOOK: Output: default@outputtbl1_n1
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n1
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/union_remove_2.q.out ql/src/test/results/clientpositive/union_remove_2.q.out
index 00b51385c4..f5f0ac001f 100644
--- ql/src/test/results/clientpositive/union_remove_2.q.out
+++ ql/src/test/results/clientpositive/union_remove_2.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1_n8(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n8
+PREHOOK: Output: default@inputtbl1_n8
POSTHOOK: query: create table inputTbl1_n8(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n8
+POSTHOOK: Output: default@inputtbl1_n8
PREHOOK: query: create table outputTbl1_n11(key string, `values` bigint) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n11
+PREHOOK: Output: default@outputtbl1_n11
POSTHOOK: query: create table outputTbl1_n11(key string, `values` bigint) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n11
+POSTHOOK: Output: default@outputtbl1_n11
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n8
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/union_remove_20.q.out ql/src/test/results/clientpositive/union_remove_20.q.out
index b95f263d27..08e6b82eb0 100644
--- ql/src/test/results/clientpositive/union_remove_20.q.out
+++ ql/src/test/results/clientpositive/union_remove_20.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1_n19(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n19
+PREHOOK: Output: default@inputtbl1_n19
POSTHOOK: query: create table inputTbl1_n19(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n19
+POSTHOOK: Output: default@inputtbl1_n19
PREHOOK: query: create table outputTbl1_n27(`values` bigint, key string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n27
+PREHOOK: Output: default@outputtbl1_n27
POSTHOOK: query: create table outputTbl1_n27(`values` bigint, key string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n27
+POSTHOOK: Output: default@outputtbl1_n27
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n19
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/union_remove_21.q.out ql/src/test/results/clientpositive/union_remove_21.q.out
index 4f2c4d0b01..e86458a4e5 100644
--- ql/src/test/results/clientpositive/union_remove_21.q.out
+++ ql/src/test/results/clientpositive/union_remove_21.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1_n12(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n12
+PREHOOK: Output: default@inputtbl1_n12
POSTHOOK: query: create table inputTbl1_n12(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n12
+POSTHOOK: Output: default@inputtbl1_n12
PREHOOK: query: create table outputTbl1_n17(key string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n17
+PREHOOK: Output: default@outputtbl1_n17
POSTHOOK: query: create table outputTbl1_n17(key string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n17
+POSTHOOK: Output: default@outputtbl1_n17
PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n12
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/union_remove_22.q.out ql/src/test/results/clientpositive/union_remove_22.q.out
index 8b4935d410..959ddfa325 100644
--- ql/src/test/results/clientpositive/union_remove_22.q.out
+++ ql/src/test/results/clientpositive/union_remove_22.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: create table inputTbl1_n5(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@inputTbl1_n5
+PREHOOK: Output: default@inputtbl1_n5
POSTHOOK: query: create table inputTbl1_n5(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@inputTbl1_n5
+POSTHOOK: Output: default@inputtbl1_n5 PREHOOK: query: create table outputTbl1_n7(key string, `values` bigint, values2 bigint) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n7 +PREHOOK: Output: default@outputtbl1_n7 POSTHOOK: query: create table outputTbl1_n7(key string, `values` bigint, values2 bigint) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n7 +POSTHOOK: Output: default@outputtbl1_n7 PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n5 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/union_remove_23.q.out ql/src/test/results/clientpositive/union_remove_23.q.out index 0188b56045..d7468786c7 100644 --- ql/src/test/results/clientpositive/union_remove_23.q.out +++ ql/src/test/results/clientpositive/union_remove_23.q.out @@ -1,19 +1,19 @@ PREHOOK: query: create table inputTbl1_n25(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@inputTbl1_n25 +PREHOOK: Output: default@inputtbl1_n25 POSTHOOK: query: create table inputTbl1_n25(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@inputTbl1_n25 +POSTHOOK: Output: default@inputtbl1_n25 PREHOOK: query: create table outputTbl1_n34(key string, `values` bigint) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n34 +PREHOOK: Output: default@outputtbl1_n34 POSTHOOK: query: create table outputTbl1_n34(key string, `values` bigint) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n34 +POSTHOOK: Output: default@outputtbl1_n34 PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n25 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/union_remove_24.q.out ql/src/test/results/clientpositive/union_remove_24.q.out index 085f910f98..88e73630eb 100644 --- ql/src/test/results/clientpositive/union_remove_24.q.out +++ ql/src/test/results/clientpositive/union_remove_24.q.out @@ -1,19 +1,19 @@ PREHOOK: query: create table inputTbl1_n20(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@inputTbl1_n20 +PREHOOK: Output: default@inputtbl1_n20 POSTHOOK: query: create table inputTbl1_n20(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@inputTbl1_n20 +POSTHOOK: Output: default@inputtbl1_n20 PREHOOK: query: create table outputTbl1_n28(key double, `values` bigint) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n28 +PREHOOK: Output: default@outputtbl1_n28 POSTHOOK: query: create table outputTbl1_n28(key double, `values` bigint) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n28 +POSTHOOK: Output: default@outputtbl1_n28 PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n20 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/union_remove_25.q.out 
ql/src/test/results/clientpositive/union_remove_25.q.out index 6a324e5e9d..812328dae3 100644 --- ql/src/test/results/clientpositive/union_remove_25.q.out +++ ql/src/test/results/clientpositive/union_remove_25.q.out @@ -1,35 +1,35 @@ PREHOOK: query: create table inputTbl1_n13(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@inputTbl1_n13 +PREHOOK: Output: default@inputtbl1_n13 POSTHOOK: query: create table inputTbl1_n13(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@inputTbl1_n13 +POSTHOOK: Output: default@inputtbl1_n13 PREHOOK: query: create table outputTbl1_n19(key string, `values` bigint) partitioned by (ds string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n19 +PREHOOK: Output: default@outputtbl1_n19 POSTHOOK: query: create table outputTbl1_n19(key string, `values` bigint) partitioned by (ds string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n19 +POSTHOOK: Output: default@outputtbl1_n19 PREHOOK: query: create table outputTbl2_n6(key string, `values` bigint) partitioned by (ds string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl2_n6 +PREHOOK: Output: default@outputtbl2_n6 POSTHOOK: query: create table outputTbl2_n6(key string, `values` bigint) partitioned by (ds string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl2_n6 +POSTHOOK: Output: default@outputtbl2_n6 PREHOOK: query: create table outputTbl3_n3(key string, `values` bigint) partitioned by (ds string,hr string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl3_n3 +PREHOOK: Output: default@outputtbl3_n3 POSTHOOK: query: create table outputTbl3_n3(key string, `values` bigint) partitioned by (ds string,hr string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl3_n3 +POSTHOOK: Output: default@outputtbl3_n3 PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n13 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/union_remove_3.q.out ql/src/test/results/clientpositive/union_remove_3.q.out index cd8899ea49..74b2b2ebb7 100644 --- ql/src/test/results/clientpositive/union_remove_3.q.out +++ ql/src/test/results/clientpositive/union_remove_3.q.out @@ -1,19 +1,19 @@ PREHOOK: query: create table inputTbl1_n16(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@inputTbl1_n16 +PREHOOK: Output: default@inputtbl1_n16 POSTHOOK: query: create table inputTbl1_n16(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@inputTbl1_n16 +POSTHOOK: Output: default@inputtbl1_n16 PREHOOK: query: create table outputTbl1_n23(key string, `values` bigint) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n23 +PREHOOK: Output: default@outputtbl1_n23 POSTHOOK: query: create table outputTbl1_n23(key string, `values` bigint) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: 
Output: database:default -POSTHOOK: Output: default@outputTbl1_n23 +POSTHOOK: Output: default@outputtbl1_n23 PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n16 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/union_remove_4.q.out ql/src/test/results/clientpositive/union_remove_4.q.out index ed1c145902..05e8ebd04f 100644 --- ql/src/test/results/clientpositive/union_remove_4.q.out +++ ql/src/test/results/clientpositive/union_remove_4.q.out @@ -1,19 +1,19 @@ PREHOOK: query: create table inputTbl1_n24(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@inputTbl1_n24 +PREHOOK: Output: default@inputtbl1_n24 POSTHOOK: query: create table inputTbl1_n24(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@inputTbl1_n24 +POSTHOOK: Output: default@inputtbl1_n24 PREHOOK: query: create table outputTbl1_n33(key string, `values` bigint) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n33 +PREHOOK: Output: default@outputtbl1_n33 POSTHOOK: query: create table outputTbl1_n33(key string, `values` bigint) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n33 +POSTHOOK: Output: default@outputtbl1_n33 PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n24 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/union_remove_5.q.out ql/src/test/results/clientpositive/union_remove_5.q.out index 92a24e0dd9..074c4b1175 100644 --- ql/src/test/results/clientpositive/union_remove_5.q.out +++ ql/src/test/results/clientpositive/union_remove_5.q.out @@ -1,19 +1,19 @@ PREHOOK: query: create table inputTbl1_n4(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@inputTbl1_n4 +PREHOOK: Output: default@inputtbl1_n4 POSTHOOK: query: create table inputTbl1_n4(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@inputTbl1_n4 +POSTHOOK: Output: default@inputtbl1_n4 PREHOOK: query: create table outputTbl1_n6(key string, `values` bigint) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n6 +PREHOOK: Output: default@outputtbl1_n6 POSTHOOK: query: create table outputTbl1_n6(key string, `values` bigint) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n6 +POSTHOOK: Output: default@outputtbl1_n6 PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n4 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/union_remove_6.q.out ql/src/test/results/clientpositive/union_remove_6.q.out index 407bb3336c..252a090483 100644 --- ql/src/test/results/clientpositive/union_remove_6.q.out +++ ql/src/test/results/clientpositive/union_remove_6.q.out @@ -1,27 +1,27 @@ PREHOOK: query: create table inputTbl1_n10(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@inputTbl1_n10 +PREHOOK: Output: default@inputtbl1_n10 POSTHOOK: query: create table 
inputTbl1_n10(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@inputTbl1_n10 +POSTHOOK: Output: default@inputtbl1_n10 PREHOOK: query: create table outputTbl1_n14(key string, `values` bigint) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n14 +PREHOOK: Output: default@outputtbl1_n14 POSTHOOK: query: create table outputTbl1_n14(key string, `values` bigint) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n14 +POSTHOOK: Output: default@outputtbl1_n14 PREHOOK: query: create table outputTbl2_n4(key string, `values` bigint) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl2_n4 +PREHOOK: Output: default@outputtbl2_n4 POSTHOOK: query: create table outputTbl2_n4(key string, `values` bigint) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl2_n4 +POSTHOOK: Output: default@outputtbl2_n4 PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n10 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/union_remove_6_subq.q.out ql/src/test/results/clientpositive/union_remove_6_subq.q.out index 8b68e4dafe..4865a58b09 100644 --- ql/src/test/results/clientpositive/union_remove_6_subq.q.out +++ ql/src/test/results/clientpositive/union_remove_6_subq.q.out @@ -1,27 +1,27 @@ PREHOOK: query: create table inputTbl1_n0(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@inputTbl1_n0 +PREHOOK: Output: default@inputtbl1_n0 POSTHOOK: query: create table inputTbl1_n0(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@inputTbl1_n0 +POSTHOOK: Output: default@inputtbl1_n0 PREHOOK: query: create table outputTbl1_n0(key string, `values` bigint) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n0 +PREHOOK: Output: default@outputtbl1_n0 POSTHOOK: query: create table outputTbl1_n0(key string, `values` bigint) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n0 +POSTHOOK: Output: default@outputtbl1_n0 PREHOOK: query: create table outputTbl2(key string, `values` bigint) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl2 +PREHOOK: Output: default@outputtbl2 POSTHOOK: query: create table outputTbl2(key string, `values` bigint) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl2 +POSTHOOK: Output: default@outputtbl2 PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n0 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/union_remove_7.q.out ql/src/test/results/clientpositive/union_remove_7.q.out index b3014765f6..80e4a304e1 100644 --- ql/src/test/results/clientpositive/union_remove_7.q.out +++ ql/src/test/results/clientpositive/union_remove_7.q.out @@ -1,19 +1,19 @@ PREHOOK: query: create table inputTbl1_n17(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: 
database:default -PREHOOK: Output: default@inputTbl1_n17 +PREHOOK: Output: default@inputtbl1_n17 POSTHOOK: query: create table inputTbl1_n17(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@inputTbl1_n17 +POSTHOOK: Output: default@inputtbl1_n17 PREHOOK: query: create table outputTbl1_n24(key string, `values` bigint) stored as rcfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n24 +PREHOOK: Output: default@outputtbl1_n24 POSTHOOK: query: create table outputTbl1_n24(key string, `values` bigint) stored as rcfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n24 +POSTHOOK: Output: default@outputtbl1_n24 PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n17 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/union_remove_8.q.out ql/src/test/results/clientpositive/union_remove_8.q.out index 942b54985c..1de26e9ab4 100644 --- ql/src/test/results/clientpositive/union_remove_8.q.out +++ ql/src/test/results/clientpositive/union_remove_8.q.out @@ -1,19 +1,19 @@ PREHOOK: query: create table inputTbl1_n9(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@inputTbl1_n9 +PREHOOK: Output: default@inputtbl1_n9 POSTHOOK: query: create table inputTbl1_n9(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@inputTbl1_n9 +POSTHOOK: Output: default@inputtbl1_n9 PREHOOK: query: create table outputTbl1_n12(key string, `values` bigint) stored as rcfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n12 +PREHOOK: Output: default@outputtbl1_n12 POSTHOOK: query: create table outputTbl1_n12(key string, `values` bigint) stored as rcfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n12 +POSTHOOK: Output: default@outputtbl1_n12 PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1_n9 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/union_remove_9.q.out ql/src/test/results/clientpositive/union_remove_9.q.out index 2e2693be37..eb2cdfc116 100644 --- ql/src/test/results/clientpositive/union_remove_9.q.out +++ ql/src/test/results/clientpositive/union_remove_9.q.out @@ -1,19 +1,19 @@ PREHOOK: query: create table inputTbl1_n15(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@inputTbl1_n15 +PREHOOK: Output: default@inputtbl1_n15 POSTHOOK: query: create table inputTbl1_n15(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@inputTbl1_n15 +POSTHOOK: Output: default@inputtbl1_n15 PREHOOK: query: create table outputTbl1_n22(key string, `values` bigint) stored as rcfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n22 +PREHOOK: Output: default@outputtbl1_n22 POSTHOOK: query: create table outputTbl1_n22(key string, `values` bigint) stored as rcfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n22 +POSTHOOK: Output: default@outputtbl1_n22 PREHOOK: query: 
load data local inpath '../../data/files/T1.txt' into table inputTbl1_n15 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/uniquejoin.q.out ql/src/test/results/clientpositive/uniquejoin.q.out index d1b66de383..fa239f21d9 100644 --- ql/src/test/results/clientpositive/uniquejoin.q.out +++ ql/src/test/results/clientpositive/uniquejoin.q.out @@ -1,27 +1,27 @@ PREHOOK: query: CREATE TABLE T1_n1(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n1 +PREHOOK: Output: default@t1_n1 POSTHOOK: query: CREATE TABLE T1_n1(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n1 +POSTHOOK: Output: default@t1_n1 PREHOOK: query: CREATE TABLE T2_n1(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n1 +PREHOOK: Output: default@t2_n1 POSTHOOK: query: CREATE TABLE T2_n1(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n1 +POSTHOOK: Output: default@t2_n1 PREHOOK: query: CREATE TABLE T3_n0(key STRING, val STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n0 +PREHOOK: Output: default@t3_n0 POSTHOOK: query: CREATE TABLE T3_n0(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n0 +POSTHOOK: Output: default@t3_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n1 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/vector_decimal_10_0.q.out ql/src/test/results/clientpositive/vector_decimal_10_0.q.out new file mode 100644 index 0000000000..0dc2d25e27 --- /dev/null +++ ql/src/test/results/clientpositive/vector_decimal_10_0.q.out @@ -0,0 +1,241 @@ +PREHOOK: query: DROP TABLE IF EXISTS decimal_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS decimal_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS `decimal` +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS `decimal` +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE decimal_txt (`dec` decimal) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_txt +POSTHOOK: query: CREATE TABLE decimal_txt (`dec` decimal) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal_10_0.txt' OVERWRITE INTO TABLE decimal_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal_10_0.txt' OVERWRITE INTO TABLE decimal_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_txt +PREHOOK: query: CREATE TABLE `DECIMAL` STORED AS ORC AS SELECT * FROM decimal_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@decimal_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal +POSTHOOK: query: CREATE TABLE `DECIMAL` STORED AS ORC AS SELECT * FROM decimal_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@decimal_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: 
default@decimal +POSTHOOK: Lineage: decimal.dec SIMPLE [(decimal_txt)decimal_txt.FieldSchema(name:dec, type:decimal(10,0), comment:null), ] +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT `dec` FROM `DECIMAL` order by `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT `dec` FROM `DECIMAL` order by `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal + Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] + Select Operator + expressions: dec (type: decimal(10,0)) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,0)) + null sort order: z + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: dec:decimal(10,0)/DECIMAL_64 + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT `dec` FROM `DECIMAL` order by `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec` FROM `DECIMAL` order by `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal +#### A masked pattern was here #### +1000000000 +NULL +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT 
`dec` FROM `decimal_txt` order by `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_txt +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT `dec` FROM `decimal_txt` order by `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_txt +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_txt + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:dec:decimal(10,0)/DECIMAL_64, 1:ROW__ID:struct] + Select Operator + expressions: dec (type: decimal(10,0)) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,0)) + null sort order: z + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: dec:decimal(10,0)/DECIMAL_64 + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(10,0)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT `dec` FROM `decimal_txt` order by `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_txt +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec` FROM `decimal_txt` order by `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_txt +#### A masked pattern was here #### +1000000000 +NULL +PREHOOK: query: DROP TABLE DECIMAL_txt +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_txt +PREHOOK: Output: default@decimal_txt +POSTHOOK: query: DROP TABLE DECIMAL_txt +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: 
default@decimal_txt +POSTHOOK: Output: default@decimal_txt +PREHOOK: query: DROP TABLE `DECIMAL` +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal +PREHOOK: Output: default@decimal +POSTHOOK: query: DROP TABLE `DECIMAL` +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal +POSTHOOK: Output: default@decimal diff --git ql/src/test/results/clientpositive/vector_decimal_6.q.out ql/src/test/results/clientpositive/vector_decimal_6.q.out new file mode 100644 index 0000000000..aabf0ea727 --- /dev/null +++ ql/src/test/results/clientpositive/vector_decimal_6.q.out @@ -0,0 +1,689 @@ +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_1_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_1_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_2_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_2_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_3_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_3_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_3 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_6_3 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE DECIMAL_6_1_txt(key decimal(10,5), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_6_1_txt +POSTHOOK: query: CREATE TABLE DECIMAL_6_1_txt(key decimal(10,5), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_6_1_txt +PREHOOK: query: CREATE TABLE DECIMAL_6_2_txt(key decimal(17,4), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_6_2_txt +POSTHOOK: query: CREATE TABLE DECIMAL_6_2_txt(key decimal(17,4), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_6_2_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_1_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_6_1_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_1_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_6_1_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_2_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_6_2_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_2_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_6_2_txt +PREHOOK: query: CREATE TABLE DECIMAL_6_1(key decimal(10,5), value int) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_6_1 +POSTHOOK: query: CREATE TABLE 
DECIMAL_6_1(key decimal(10,5), value int) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_6_1 +PREHOOK: query: CREATE TABLE DECIMAL_6_2(key decimal(17,4), value int) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_6_2 +POSTHOOK: query: CREATE TABLE DECIMAL_6_2(key decimal(17,4), value int) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_6_2 +PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_6_1 SELECT * FROM DECIMAL_6_1_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_1_txt +PREHOOK: Output: default@decimal_6_1 +POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_6_1 SELECT * FROM DECIMAL_6_1_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_6_1_txt +POSTHOOK: Output: default@decimal_6_1 +POSTHOOK: Lineage: decimal_6_1.key SIMPLE [(decimal_6_1_txt)decimal_6_1_txt.FieldSchema(name:key, type:decimal(10,5), comment:null), ] +POSTHOOK: Lineage: decimal_6_1.value SIMPLE [(decimal_6_1_txt)decimal_6_1_txt.FieldSchema(name:value, type:int, comment:null), ] +PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_6_2 SELECT * FROM DECIMAL_6_2_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_2_txt +PREHOOK: Output: default@decimal_6_2 +POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_6_2 SELECT * FROM DECIMAL_6_2_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_6_2_txt +POSTHOOK: Output: default@decimal_6_2 +POSTHOOK: Lineage: decimal_6_2.key SIMPLE [(decimal_6_2_txt)decimal_6_2_txt.FieldSchema(name:key, type:decimal(17,4), comment:null), ] +POSTHOOK: Lineage: decimal_6_2.value SIMPLE [(decimal_6_2_txt)decimal_6_2_txt.FieldSchema(name:value, type:int, comment:null), ] +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT * FROM DECIMAL_6_1 ORDER BY key, value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_1 +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT * FROM DECIMAL_6_1 ORDER BY key, value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_6_1 +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_6_1 + Statistics: Num rows: 27 Data size: 2684 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:decimal(10,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] + Select Operator + expressions: key (type: decimal(10,5)), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 27 Data size: 2684 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(10,5)), _col1 (type: int) + null sort order: zz + sort order: ++ + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 27 Data size: 2684 
Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:decimal(10,5)/DECIMAL_64, value:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(10,5)), KEY.reducesinkkey1 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 27 Data size: 2684 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 27 Data size: 2684 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT * FROM DECIMAL_6_1 ORDER BY key, value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_1 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_6_1 ORDER BY key, value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_6_1 +#### A masked pattern was here #### +-4400.00000 4400 +-1255.49000 -1255 +-1.12200 -11 +-1.12000 -1 +-0.33300 0 +-0.30000 0 +0.00000 0 +0.00000 0 +0.33300 0 +1.00000 1 +1.00000 1 +1.12000 1 +1.12200 1 +2.00000 2 +3.14000 3 +3.14000 3 +3.14000 4 +10.00000 10 +10.73433 5 +124.00000 124 +125.20000 125 +23232.23435 2 +NULL -1234567890 +NULL 0 +NULL 3 +NULL 4 +NULL 1234567890 +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT * FROM DECIMAL_6_2 ORDER BY key, value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_2 +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT * FROM DECIMAL_6_2 ORDER BY key, value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_6_2 +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_6_2 + Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:decimal(17,4)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] + Select Operator + expressions: key (type: decimal(17,4)), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(17,4)), _col1 (type: int) + null sort order: zz + sort order: ++ + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:decimal(17,4)/DECIMAL_64, value:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(17,4)), KEY.reducesinkkey1 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT * FROM DECIMAL_6_2 ORDER BY key, value +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_6_2 ORDER BY key, value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_6_2 +#### A masked pattern was here #### +-1234567890.1235 -1234567890 +-4400.0000 4400 +-1255.4900 -1255 +-1.1220 -11 +-1.1200 -1 +-0.3330 0 +-0.3000 0 +0.0000 0 +0.0000 0 +0.3330 0 +1.0000 1 +1.0000 1 +1.1200 1 +1.1220 1 +2.0000 2 +3.1400 3 +3.1400 3 +3.1400 4 +10.0000 10 +10.7343 5 +124.0000 124 +125.2000 125 +23232.2344 2 +2389432.2375 3 +2389432.2375 4 +1234567890.1235 1234567890 +NULL 0 +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT T.key from ( + SELECT key, value from DECIMAL_6_1 + UNION ALL + SELECT key, value from DECIMAL_6_2 +) T order by T.key +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_1 +PREHOOK: Input: default@decimal_6_2 +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT T.key from ( + SELECT key, value from DECIMAL_6_1 + UNION ALL + SELECT key, value from DECIMAL_6_2 +) T order by T.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_6_1 +POSTHOOK: Input: default@decimal_6_2 +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_6_1 + Statistics: Num rows: 27 Data size: 2576 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: CAST( key AS decimal(18,5)) (type: decimal(18,5)) + outputColumnNames: _col0 + Statistics: Num rows: 27 Data size: 
2576 Basic stats: COMPLETE Column stats: COMPLETE + Union + Statistics: Num rows: 54 Data size: 5600 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(18,5)) + null sort order: z + sort order: + + Statistics: Num rows: 54 Data size: 5600 Basic stats: COMPLETE Column stats: COMPLETE + TableScan + alias: decimal_6_2 + Statistics: Num rows: 27 Data size: 3024 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: CAST( key AS decimal(18,5)) (type: decimal(18,5)) + outputColumnNames: _col0 + Statistics: Num rows: 27 Data size: 3024 Basic stats: COMPLETE Column stats: COMPLETE + Union + Statistics: Num rows: 54 Data size: 5600 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: decimal(18,5)) + null sort order: z + sort order: + + Statistics: Num rows: 54 Data size: 5600 Basic stats: COMPLETE Column stats: COMPLETE + Map Vectorization: + enabled: false + enabledConditionsNotMet: Vectorized map work only works with 1 TableScanOperator IS false + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: decimal(18,5)) + outputColumnNames: _col0 + Statistics: Num rows: 54 Data size: 4928 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 54 Data size: 4928 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT T.key from ( + SELECT key, value from DECIMAL_6_1 + UNION ALL + SELECT key, value from DECIMAL_6_2 +) T order by T.key +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_1 +PREHOOK: Input: default@decimal_6_2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT T.key from ( + SELECT key, value from DECIMAL_6_1 + UNION ALL + SELECT key, value from DECIMAL_6_2 +) T order by T.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_6_1 +POSTHOOK: Input: default@decimal_6_2 +#### A masked pattern was here #### +-1234567890.12350 +-4400.00000 +-4400.00000 +-1255.49000 +-1255.49000 +-1.12200 +-1.12200 +-1.12000 +-1.12000 +-0.33300 +-0.33300 +-0.30000 +-0.30000 +0.00000 +0.00000 +0.00000 +0.00000 +0.33300 +0.33300 +1.00000 +1.00000 +1.00000 +1.00000 +1.12000 +1.12000 +1.12200 +1.12200 +2.00000 +2.00000 +3.14000 +3.14000 +3.14000 +3.14000 +3.14000 +3.14000 +10.00000 +10.00000 +10.73430 +10.73433 +124.00000 +124.00000 +125.20000 +125.20000 +23232.23435 +23232.23440 +2389432.23750 +2389432.23750 +1234567890.12350 +NULL +NULL +NULL +NULL +NULL +NULL +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1 ORDER BY v +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@decimal_6_1 +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_6_3 +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1 ORDER BY v +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@decimal_6_1 +POSTHOOK: 
Output: database:default +POSTHOOK: Output: default@decimal_6_3 +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + Stage-3 depends on stages: Stage-0 + Stage-2 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_6_1 + Statistics: Num rows: 27 Data size: 2684 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:decimal(10,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] + Select Operator + expressions: (key + 5.5) (type: decimal(11,5)), (value * 11) (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [3, 4] + selectExpressions: Decimal64ColAddDecimal64Scalar(col 0:decimal(10,5)/DECIMAL_64, decimal64Val 550000, decimalVal 5.5) -> 3:decimal(11,5)/DECIMAL_64, LongColMultiplyLongScalar(col 1:int, val 11) -> 4:int + Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col1 (type: int) + null sort order: z + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkOperator + native: false + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: decimal(11,5)) + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:decimal(10,5)/DECIMAL_64, value:int + partitionColumnCount: 0 + scratchColumnTypeNames: [decimal(11,5)/DECIMAL_64, bigint] + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Select Operator + expressions: VALUE._col0 (type: decimal(11,5)), KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat + serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde + name: default.decimal_6_3 + Select Operator + expressions: _col0 (type: decimal(11,5)), _col1 (type: int) + outputColumnNames: col1, col2 + Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: compute_stats(col1, 'hll'), compute_stats(col2, 'hll') + mode: complete + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 1056 Basic stats: COMPLETE Column stats: 
COMPLETE + Select Operator + expressions: _col0 (type: struct), _col1 (type: struct) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 1056 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 1056 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Move Operator + files: + hdfs directory: true +#### A masked pattern was here #### + + Stage: Stage-3 + Create Table + columns: k decimal(11,5), v int + input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat + serde name: org.apache.hadoop.hive.ql.io.orc.OrcSerde + name: hive.default.decimal_6_3 + + Stage: Stage-2 + Stats Work + Basic Stats Work: + Column Stats Desc: + Columns: k, v + Column Types: decimal(11,5), int + Table: default.decimal_6_3 + +PREHOOK: query: CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1 ORDER BY v +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@decimal_6_1 +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_6_3 +POSTHOOK: query: CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1 ORDER BY v +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@decimal_6_1 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_6_3 +POSTHOOK: Lineage: decimal_6_3.k EXPRESSION [(decimal_6_1)decimal_6_1.FieldSchema(name:key, type:decimal(10,5), comment:null), ] +POSTHOOK: Lineage: decimal_6_3.v EXPRESSION [(decimal_6_1)decimal_6_1.FieldSchema(name:value, type:int, comment:null), ] +PREHOOK: query: desc DECIMAL_6_3 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@decimal_6_3 +POSTHOOK: query: desc DECIMAL_6_3 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@decimal_6_3 +k decimal(11,5) +v int +PREHOOK: query: SELECT * FROM DECIMAL_6_3 ORDER BY k, v +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_6_3 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM DECIMAL_6_3 ORDER BY k, v +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_6_3 +#### A masked pattern was here #### +-4394.50000 48400 +-1249.99000 -13805 +4.37800 -121 +4.38000 -11 +5.16700 0 +5.20000 0 +5.50000 0 +5.50000 0 +5.83300 0 +6.50000 11 +6.50000 11 +6.62000 11 +6.62200 11 +7.50000 22 +8.64000 33 +8.64000 33 +8.64000 44 +15.50000 110 +16.23433 55 +129.50000 1364 +130.70000 1375 +23237.73435 22 +NULL -695344902 +NULL 0 +NULL 33 +NULL 44 +NULL 695344902 diff --git ql/src/test/results/clientpositive/vector_decimal_col_scalar_division.q.out ql/src/test/results/clientpositive/vector_decimal_col_scalar_division.q.out index 663313eab1..a1f4f738c7 100644 --- ql/src/test/results/clientpositive/vector_decimal_col_scalar_division.q.out +++ ql/src/test/results/clientpositive/vector_decimal_col_scalar_division.q.out @@ -17,7 +17,7 @@ PREHOOK: query: CREATE temporary TABLE `catalog_Sales`( `cs_net_profit` decimal(7,2)) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@catalog_Sales +PREHOOK: Output: default@catalog_sales POSTHOOK: query: CREATE temporary TABLE `catalog_Sales`( `cs_quantity` int, `cs_wholesale_cost` decimal(7,2), @@ -37,7 +37,7 @@ POSTHOOK: query: CREATE temporary TABLE 
`catalog_Sales`( `cs_net_profit` decimal(7,2)) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@catalog_Sales +POSTHOOK: Output: default@catalog_sales PREHOOK: query: explain vectorization detail select max((((cs_ext_list_price - cs_ext_wholesale_cost) - cs_ext_discount_amt) + cs_ext_sales_price) / 2) from catalog_sales PREHOOK: type: QUERY PREHOOK: Input: default@catalog_sales diff --git ql/src/test/results/clientpositive/vector_decimal_udf2.q.out ql/src/test/results/clientpositive/vector_decimal_udf2.q.out new file mode 100644 index 0000000000..f76360c13f --- /dev/null +++ ql/src/test/results/clientpositive/vector_decimal_udf2.q.out @@ -0,0 +1,528 @@ +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_txt +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_txt +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_n0 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_n0 +POSTHOOK: type: DROPTABLE +PREHOOK: query: CREATE TABLE DECIMAL_UDF2_txt (key decimal(14,5), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_udf2_txt +POSTHOOK: query: CREATE TABLE DECIMAL_UDF2_txt (key decimal(14,5), value int) +ROW FORMAT DELIMITED + FIELDS TERMINATED BY ' ' +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_udf2_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF2_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@decimal_udf2_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF2_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@decimal_udf2_txt +PREHOOK: query: CREATE TABLE DECIMAL_UDF2_n0 (key decimal(14,5), value int) +STORED AS ORC +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@decimal_udf2_n0 +POSTHOOK: query: CREATE TABLE DECIMAL_UDF2_n0 (key decimal(14,5), value int) +STORED AS ORC +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@decimal_udf2_n0 +PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_UDF2_n0 SELECT * FROM DECIMAL_UDF2_txt +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2_txt +PREHOOK: Output: default@decimal_udf2_n0 +POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_UDF2_n0 SELECT * FROM DECIMAL_UDF2_txt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2_txt +POSTHOOK: Output: default@decimal_udf2_n0 +POSTHOOK: Lineage: decimal_udf2_n0.key SIMPLE [(decimal_udf2_txt)decimal_udf2_txt.FieldSchema(name:key, type:decimal(14,5), comment:null), ] +POSTHOOK: Lineage: decimal_udf2_n0.value SIMPLE [(decimal_udf2_txt)decimal_udf2_txt.FieldSchema(name:value, type:int, comment:null), ] +PREHOOK: query: insert into DECIMAL_UDF2_n0 values (NULL, NULL) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@decimal_udf2_n0 +POSTHOOK: query: insert into DECIMAL_UDF2_n0 values (NULL, NULL) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@decimal_udf2_n0 +POSTHOOK: Lineage: decimal_udf2_n0.key EXPRESSION [] +POSTHOOK: Lineage: decimal_udf2_n0.value EXPRESSION [] +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT acos(key), asin(key), atan(key), 
cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2_n0 WHERE key = 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2_n0 +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2_n0 WHERE key = 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2_n0 +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf2_n0 + filterExpr: (key = 10) (type: boolean) + Statistics: Num rows: 39 Data size: 4032 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000) + predicate: (key = 10) (type: boolean) + Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: null (type: double), null (type: double), 1.4711276743037347D (type: double), -0.8390715290764524D (type: double), -0.5440211108893698D (type: double), 0.6483608274590866D (type: double), 0.17453292519943295D (type: double) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [3, 4, 5, 6, 7, 8, 9] + selectExpressions: ConstantVectorExpression(val null) -> 3:double, ConstantVectorExpression(val null) -> 4:double, ConstantVectorExpression(val 1.4711276743037347) -> 5:double, ConstantVectorExpression(val -0.8390715290764524) -> 6:double, ConstantVectorExpression(val -0.5440211108893698) -> 7:double, ConstantVectorExpression(val 0.6483608274590866) -> 8:double, ConstantVectorExpression(val 0.17453292519943295) -> 9:double + Statistics: Num rows: 2 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 2 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:decimal(14,5)/DECIMAL_64, value:int + partitionColumnCount: 0 + scratchColumnTypeNames: [double, double, double, double, double, double, double] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2_n0 WHERE key = 10 +PREHOOK: type: QUERY +PREHOOK: Input: 
default@decimal_udf2_n0 +#### A masked pattern was here #### +POSTHOOK: query: SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2_n0 WHERE key = 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2_n0 +#### A masked pattern was here #### +NULL NULL 1.4711276743037347 -0.8390715290764524 -0.5440211108893698 0.6483608274590866 0.17453292519943295 +PREHOOK: query: SELECT SUM(HASH(*)) +FROM (SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2_n0) q +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2_n0 +#### A masked pattern was here #### +POSTHOOK: query: SELECT SUM(HASH(*)) +FROM (SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2_n0) q +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2_n0 +#### A masked pattern was here #### +-3806952922 +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT + exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2_n0 WHERE key = 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2_n0 +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT + exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2_n0 WHERE key = 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2_n0 +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf2_n0 + filterExpr: (key = 10) (type: boolean) + Statistics: Num rows: 39 Data size: 4188 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000) + predicate: (key = 10) (type: boolean) + Statistics: Num rows: 2 Data size: 232 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 22026.465794806718D (type: double), 2.302585092994046D (type: double), 2.302585092994046D (type: double), 1.0D (type: double), log(10, value) (type: double), log(value, 10) (type: double), 1.0D (type: double), 3.1622776601683795D (type: double) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [3, 4, 5, 6, 7, 8, 9, 10] + selectExpressions: ConstantVectorExpression(val 22026.465794806718) -> 3:double, ConstantVectorExpression(val 2.302585092994046) -> 4:double, ConstantVectorExpression(val 2.302585092994046) -> 5:double, ConstantVectorExpression(val 1.0) -> 6:double, FuncLogWithBaseLongToDouble(col 1:double) -> 7:double, VectorUDFAdaptor(log(value, 10)) -> 8:double, ConstantVectorExpression(val 1.0) -> 9:double, ConstantVectorExpression(val 3.1622776601683795) -> 10:double + Statistics: Num rows: 2 Data size: 128 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + 
native: false + Statistics: Num rows: 2 Data size: 128 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: true + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:decimal(14,5)/DECIMAL_64, value:int + partitionColumnCount: 0 + scratchColumnTypeNames: [double, double, double, double, double, double, double, double] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT + exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2_n0 WHERE key = 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2_n0 +#### A masked pattern was here #### +POSTHOOK: query: SELECT + exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2_n0 WHERE key = 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2_n0 +#### A masked pattern was here #### +22026.465794806718 2.302585092994046 2.302585092994046 1.0 1.0 1.0 1.0 3.1622776601683795 +PREHOOK: query: SELECT SUM(HASH(*)) +FROM (SELECT + exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2_n0) q +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2_n0 +#### A masked pattern was here #### +POSTHOOK: query: SELECT SUM(HASH(*)) +FROM (SELECT + exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2_n0) q +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2_n0 +#### A masked pattern was here #### +1514360349 +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2_txt WHERE key = 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2_txt +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2_txt WHERE key = 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2_txt +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf2_txt + filterExpr: (key = 10) (type: boolean) + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000) + predicate: (key = 10) (type: boolean) + Statistics: Num rows: 1 Data 
size: 112 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: null (type: double), null (type: double), 1.4711276743037347D (type: double), -0.8390715290764524D (type: double), -0.5440211108893698D (type: double), 0.6483608274590866D (type: double), 0.17453292519943295D (type: double) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [3, 4, 5, 6, 7, 8, 9] + selectExpressions: ConstantVectorExpression(val null) -> 3:double, ConstantVectorExpression(val null) -> 4:double, ConstantVectorExpression(val 1.4711276743037347) -> 5:double, ConstantVectorExpression(val -0.8390715290764524) -> 6:double, ConstantVectorExpression(val -0.5440211108893698) -> 7:double, ConstantVectorExpression(val 0.6483608274590866) -> 8:double, ConstantVectorExpression(val 0.17453292519943295) -> 9:double + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:decimal(14,5)/DECIMAL_64, value:int + partitionColumnCount: 0 + scratchColumnTypeNames: [double, double, double, double, double, double, double] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2_txt WHERE key = 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2_txt +#### A masked pattern was here #### +POSTHOOK: query: SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2_txt WHERE key = 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2_txt +#### A masked pattern was here #### +NULL NULL 1.4711276743037347 -0.8390715290764524 -0.5440211108893698 0.6483608274590866 0.17453292519943295 +PREHOOK: query: SELECT SUM(HASH(*)) +FROM (SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2_txt) q +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2_txt +#### A masked pattern was here #### +POSTHOOK: query: SELECT SUM(HASH(*)) +FROM (SELECT acos(key), asin(key), atan(key), cos(key), sin(key), tan(key), radians(key) +FROM DECIMAL_UDF2_txt) q +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2_txt +#### A masked pattern was here #### +-3806952922 +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT + exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2_txt WHERE key = 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2_txt +#### A masked pattern was here #### +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT + 
exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2_txt WHERE key = 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2_txt +#### A masked pattern was here #### +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: decimal_udf2_txt + filterExpr: (key = 10) (type: boolean) + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:decimal(14,5)/DECIMAL_64, 1:value:int, 2:ROW__ID:struct] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicateExpression: FilterDecimal64ColEqualDecimal64Scalar(col 0:decimal(14,5)/DECIMAL_64, val 1000000) + predicate: (key = 10) (type: boolean) + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: 22026.465794806718D (type: double), 2.302585092994046D (type: double), 2.302585092994046D (type: double), 1.0D (type: double), log(10, value) (type: double), log(value, 10) (type: double), 1.0D (type: double), 3.1622776601683795D (type: double) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [3, 4, 5, 6, 7, 8, 9, 10] + selectExpressions: ConstantVectorExpression(val 22026.465794806718) -> 3:double, ConstantVectorExpression(val 2.302585092994046) -> 4:double, ConstantVectorExpression(val 2.302585092994046) -> 5:double, ConstantVectorExpression(val 1.0) -> 6:double, FuncLogWithBaseLongToDouble(col 1:double) -> 7:double, VectorUDFAdaptor(log(value, 10)) -> 8:double, ConstantVectorExpression(val 1.0) -> 9:double, ConstantVectorExpression(val 3.1622776601683795) -> 10:double + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: true + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:decimal(14,5)/DECIMAL_64, value:int + partitionColumnCount: 0 + scratchColumnTypeNames: [double, double, double, double, double, double, double, double] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT + exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2_txt WHERE key = 10 +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2_txt +#### A masked pattern was here #### +POSTHOOK: query: SELECT + exp(key), ln(key), + log(key), 
log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2_txt WHERE key = 10 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2_txt +#### A masked pattern was here #### +22026.465794806718 2.302585092994046 2.302585092994046 1.0 1.0 1.0 1.0 3.1622776601683795 +PREHOOK: query: SELECT SUM(HASH(*)) +FROM (SELECT + exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2_txt) q +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_udf2_txt +#### A masked pattern was here #### +POSTHOOK: query: SELECT SUM(HASH(*)) +FROM (SELECT + exp(key), ln(key), + log(key), log(key, key), log(key, value), log(value, key), + log10(key), sqrt(key) +FROM DECIMAL_UDF2_txt) q +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_udf2_txt +#### A masked pattern was here #### +1514360349 +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_txt +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_udf2_txt +PREHOOK: Output: default@decimal_udf2_txt +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_txt +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_udf2_txt +POSTHOOK: Output: default@decimal_udf2_txt +PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_n0 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@decimal_udf2_n0 +PREHOOK: Output: default@decimal_udf2_n0 +POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_UDF2_n0 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@decimal_udf2_n0 +POSTHOOK: Output: default@decimal_udf2_n0 diff --git ql/src/test/results/clientpositive/vector_left_outer_join2.q.out ql/src/test/results/clientpositive/vector_left_outer_join2.q.out new file mode 100644 index 0000000000..485c229a9a --- /dev/null +++ ql/src/test/results/clientpositive/vector_left_outer_join2.q.out @@ -0,0 +1,862 @@ +PREHOOK: query: drop table if exists TJOIN1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists TJOIN1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: drop table if exists TJOIN2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists TJOIN2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists TJOIN1 (RNUM int , C1 int, C2 int) STORED AS orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@tjoin1 +POSTHOOK: query: create table if not exists TJOIN1 (RNUM int , C1 int, C2 int) STORED AS orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@tjoin1 +PREHOOK: query: create table if not exists TJOIN2 (RNUM int , C1 int, C2 char(2)) STORED AS orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@tjoin2 +POSTHOOK: query: create table if not exists TJOIN2 (RNUM int , C1 int, C2 char(2)) STORED AS orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@tjoin2 +PREHOOK: query: create table if not exists TJOIN1STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@tjoin1stage +POSTHOOK: query: create table if not exists TJOIN1STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@tjoin1stage +PREHOOK: query: create table if not exists TJOIN2STAGE (RNUM int , C1 int, C2 
char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@tjoin2stage +POSTHOOK: query: create table if not exists TJOIN2STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@tjoin2stage +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tjoin1.txt' OVERWRITE INTO TABLE TJOIN1STAGE +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@tjoin1stage +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tjoin1.txt' OVERWRITE INTO TABLE TJOIN1STAGE +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@tjoin1stage +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tjoin2.txt' OVERWRITE INTO TABLE TJOIN2STAGE +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@tjoin2stage +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tjoin2.txt' OVERWRITE INTO TABLE TJOIN2STAGE +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@tjoin2stage +PREHOOK: query: INSERT INTO TABLE TJOIN1 SELECT * from TJOIN1STAGE +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1stage +PREHOOK: Output: default@tjoin1 +POSTHOOK: query: INSERT INTO TABLE TJOIN1 SELECT * from TJOIN1STAGE +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1stage +POSTHOOK: Output: default@tjoin1 +POSTHOOK: Lineage: tjoin1.c1 SIMPLE [(tjoin1stage)tjoin1stage.FieldSchema(name:c1, type:int, comment:null), ] +POSTHOOK: Lineage: tjoin1.c2 EXPRESSION [(tjoin1stage)tjoin1stage.FieldSchema(name:c2, type:char(2), comment:null), ] +POSTHOOK: Lineage: tjoin1.rnum SIMPLE [(tjoin1stage)tjoin1stage.FieldSchema(name:rnum, type:int, comment:null), ] +_col0 _col1 _col2 +PREHOOK: query: INSERT INTO TABLE TJOIN2 SELECT * from TJOIN2STAGE +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin2stage +PREHOOK: Output: default@tjoin2 +POSTHOOK: query: INSERT INTO TABLE TJOIN2 SELECT * from TJOIN2STAGE +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin2stage +POSTHOOK: Output: default@tjoin2 +POSTHOOK: Lineage: tjoin2.c1 SIMPLE [(tjoin2stage)tjoin2stage.FieldSchema(name:c1, type:int, comment:null), ] +POSTHOOK: Lineage: tjoin2.c2 SIMPLE [(tjoin2stage)tjoin2stage.FieldSchema(name:c2, type:char(2), comment:null), ] +POSTHOOK: Lineage: tjoin2.rnum SIMPLE [(tjoin2stage)tjoin2stage.FieldSchema(name:rnum, type:int, comment:null), ] +tjoin2stage.rnum tjoin2stage.c1 tjoin2stage.c2 +PREHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: 
Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:tjoin2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:tjoin2 + TableScan + alias: tjoin2 + filterExpr: c1 is not null (type: boolean) + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: c1 is not null (type: boolean) + Statistics: Num rows: 3 Data size: 270 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: c1 (type: int), c2 (type: char(2)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 3 Data size: 270 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + filter predicates: + 0 {_col3} + 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: tjoin1 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: rnum (type: int), c1 (type: int), c2 (type: int), (c2 > 15) (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 3 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + filter predicates: + 0 {_col3} + 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col5 + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), _col5 (type: char(2)) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 +0 10 15 NULL +1 20 25 NULL +2 NULL 50 NULL +PREHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: 
[hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:tjoin2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:tjoin2 + TableScan + alias: tjoin2 + filterExpr: c1 is not null (type: boolean) + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: c1 is not null (type: boolean) + Statistics: Num rows: 3 Data size: 270 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: c1 (type: int), c2 (type: char(2)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 3 Data size: 270 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + filter predicates: + 0 {_col3} + 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: tjoin1 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: rnum (type: int), c1 (type: int), c2 (type: int), (c2 > 15) (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 3 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + filter predicates: + 0 {_col3} + 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col5 + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), _col5 (type: char(2)) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 +0 10 15 NULL +1 20 25 NULL +2 NULL 50 NULL +PREHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: 
default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:tjoin2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:tjoin2 + TableScan + alias: tjoin2 + filterExpr: c1 is not null (type: boolean) + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: c1 is not null (type: boolean) + Statistics: Num rows: 3 Data size: 270 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: c1 (type: int), c2 (type: char(2)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 3 Data size: 270 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + filter predicates: + 0 {_col3} + 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: tjoin1 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] + Select Operator + expressions: rnum (type: int), c1 (type: int), c2 (type: int), (c2 > 15) (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 4] + selectExpressions: LongColGreaterLongScalar(col 2:int, val 15) -> 4:boolean + Statistics: Num rows: 3 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + filter predicates: + 0 {_col3} + 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 1:int + bigTableValueExpressions: col 0:int, col 1:int, col 2:int + className: VectorMapJoinOuterFilteredOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false, hive.execution.engine mr IN [tez, spark] IS false + outputColumnNames: _col0, _col1, _col2, _col5 + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), _col5 (type: char(2)) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint, string] + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 +0 10 15 NULL +1 20 25 NULL +2 NULL 50 NULL +PREHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:tjoin2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:tjoin2 + TableScan + alias: tjoin2 + filterExpr: c1 is not null (type: boolean) + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: c1 is not null (type: boolean) + Statistics: Num rows: 3 Data size: 270 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: c1 (type: int), c2 (type: char(2)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 3 Data size: 270 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + filter predicates: + 0 {_col3} + 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: tjoin1 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] + Select Operator + expressions: rnum (type: int), c1 (type: int), c2 (type: int), (c2 > 15) (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 4] + selectExpressions: LongColGreaterLongScalar(col 2:int, val 15) -> 4:boolean + Statistics: Num rows: 3 Data size: 48 
Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + filter predicates: + 0 {_col3} + 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 1:int + bigTableValueExpressions: col 0:int, col 1:int, col 2:int + className: VectorMapJoinOuterFilteredOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false, hive.execution.engine mr IN [tez, spark] IS false + outputColumnNames: _col0, _col1, _col2, _col5 + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), _col5 (type: char(2)) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint, string] + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 +0 10 15 NULL +1 20 25 NULL +2 NULL 50 NULL +PREHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### 
A masked pattern was here #### +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:tjoin2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:tjoin2 + TableScan + alias: tjoin2 + filterExpr: c1 is not null (type: boolean) + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: c1 is not null (type: boolean) + Statistics: Num rows: 3 Data size: 270 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: c1 (type: int), c2 (type: char(2)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 3 Data size: 270 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + filter predicates: + 0 {_col3} + 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: tjoin1 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] + Select Operator + expressions: rnum (type: int), c1 (type: int), c2 (type: int), (c2 > 15) (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 4] + selectExpressions: LongColGreaterLongScalar(col 2:int, val 15) -> 4:boolean + Statistics: Num rows: 3 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + filter predicates: + 0 {_col3} + 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 1:int + bigTableValueExpressions: col 0:int, col 1:int, col 2:int + className: VectorMapJoinOuterFilteredOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + outputColumnNames: _col0, _col1, _col2, _col5 + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), _col5 (type: char(2)) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint, string] + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 +0 10 15 NULL +1 20 25 NULL +2 NULL 50 NULL +PREHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:tjoin2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:tjoin2 + TableScan + alias: tjoin2 + filterExpr: c1 is not null (type: boolean) + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: c1 is not null (type: boolean) + Statistics: Num rows: 3 Data size: 270 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: c1 (type: int), c2 (type: char(2)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 3 Data size: 270 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + filter predicates: + 0 {_col3} + 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: tjoin1 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] + Select Operator + expressions: rnum (type: int), c1 (type: int), c2 (type: int), (c2 > 15) (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 4] + selectExpressions: LongColGreaterLongScalar(col 2:int, val 15) -> 4:boolean + Statistics: Num rows: 3 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE + Map Join 
Operator + condition map: + Left Outer Join 0 to 1 + filter predicates: + 0 {_col3} + 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 1:int + bigTableValueExpressions: col 0:int, col 1:int, col 2:int + className: VectorMapJoinOuterFilteredOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + outputColumnNames: _col0, _col1, _col2, _col5 + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), _col5 (type: char(2)) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 4 Data size: 392 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint, string] + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 +0 10 15 NULL +1 20 25 NULL +2 NULL 50 NULL diff --git ql/src/test/results/clientpositive/vector_outer_join6.q.out ql/src/test/results/clientpositive/vector_outer_join6.q.out new file mode 100644 index 0000000000..3236e1b5c8 --- /dev/null +++ ql/src/test/results/clientpositive/vector_outer_join6.q.out @@ -0,0 +1,194 @@ +PREHOOK: query: create table TJOIN1_txt (RNUM int , C1 int, C2 int) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@tjoin1_txt +POSTHOOK: query: create table TJOIN1_txt (RNUM int , C1 int, C2 int) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY 
'\n' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@tjoin1_txt +PREHOOK: query: create table TJOIN2_txt (RNUM int , C1 int, C2 char(2)) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@tjoin2_txt +POSTHOOK: query: create table TJOIN2_txt (RNUM int , C1 int, C2 char(2)) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@tjoin2_txt +PREHOOK: query: create table if not exists TJOIN3_txt (RNUM int , C1 int, C2 char(2)) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@tjoin3_txt +POSTHOOK: query: create table if not exists TJOIN3_txt (RNUM int , C1 int, C2 char(2)) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@tjoin3_txt +PREHOOK: query: create table TJOIN4_txt (RNUM int , C1 int, C2 char(2)) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@tjoin4_txt +POSTHOOK: query: create table TJOIN4_txt (RNUM int , C1 int, C2 char(2)) +ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@tjoin4_txt +PREHOOK: query: load data local inpath '../../data/files/TJOIN1' into table TJOIN1_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@tjoin1_txt +POSTHOOK: query: load data local inpath '../../data/files/TJOIN1' into table TJOIN1_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@tjoin1_txt +PREHOOK: query: load data local inpath '../../data/files/TJOIN2' into table TJOIN2_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@tjoin2_txt +POSTHOOK: query: load data local inpath '../../data/files/TJOIN2' into table TJOIN2_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@tjoin2_txt +PREHOOK: query: load data local inpath '../../data/files/TJOIN3' into table TJOIN3_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@tjoin3_txt +POSTHOOK: query: load data local inpath '../../data/files/TJOIN3' into table TJOIN3_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@tjoin3_txt +PREHOOK: query: load data local inpath '../../data/files/TJOIN4' into table TJOIN4_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@tjoin4_txt +POSTHOOK: query: load data local inpath '../../data/files/TJOIN4' into table TJOIN4_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@tjoin4_txt +PREHOOK: query: create table TJOIN1_n0 stored as orc AS SELECT * FROM TJOIN1_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@tjoin1_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@tjoin1_n0 +POSTHOOK: query: create table TJOIN1_n0 stored as orc AS SELECT * FROM TJOIN1_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@tjoin1_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@tjoin1_n0 
+POSTHOOK: Lineage: tjoin1_n0.c1 SIMPLE [(tjoin1_txt)tjoin1_txt.FieldSchema(name:c1, type:int, comment:null), ] +POSTHOOK: Lineage: tjoin1_n0.c2 SIMPLE [(tjoin1_txt)tjoin1_txt.FieldSchema(name:c2, type:int, comment:null), ] +POSTHOOK: Lineage: tjoin1_n0.rnum SIMPLE [(tjoin1_txt)tjoin1_txt.FieldSchema(name:rnum, type:int, comment:null), ] +PREHOOK: query: create table TJOIN2_n0 stored as orc AS SELECT * FROM TJOIN2_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@tjoin2_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@tjoin2_n0 +POSTHOOK: query: create table TJOIN2_n0 stored as orc AS SELECT * FROM TJOIN2_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@tjoin2_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@tjoin2_n0 +POSTHOOK: Lineage: tjoin2_n0.c1 SIMPLE [(tjoin2_txt)tjoin2_txt.FieldSchema(name:c1, type:int, comment:null), ] +POSTHOOK: Lineage: tjoin2_n0.c2 SIMPLE [(tjoin2_txt)tjoin2_txt.FieldSchema(name:c2, type:char(2), comment:null), ] +POSTHOOK: Lineage: tjoin2_n0.rnum SIMPLE [(tjoin2_txt)tjoin2_txt.FieldSchema(name:rnum, type:int, comment:null), ] +PREHOOK: query: create table TJOIN3 stored as orc AS SELECT * FROM TJOIN3_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@tjoin3_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@tjoin3 +POSTHOOK: query: create table TJOIN3 stored as orc AS SELECT * FROM TJOIN3_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@tjoin3_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@tjoin3 +POSTHOOK: Lineage: tjoin3.c1 SIMPLE [(tjoin3_txt)tjoin3_txt.FieldSchema(name:c1, type:int, comment:null), ] +POSTHOOK: Lineage: tjoin3.c2 SIMPLE [(tjoin3_txt)tjoin3_txt.FieldSchema(name:c2, type:char(2), comment:null), ] +POSTHOOK: Lineage: tjoin3.rnum SIMPLE [(tjoin3_txt)tjoin3_txt.FieldSchema(name:rnum, type:int, comment:null), ] +PREHOOK: query: create table TJOIN4 stored as orc AS SELECT * FROM TJOIN4_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@tjoin4_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@tjoin4 +POSTHOOK: query: create table TJOIN4 stored as orc AS SELECT * FROM TJOIN4_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@tjoin4_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@tjoin4 +POSTHOOK: Lineage: tjoin4.c1 SIMPLE [(tjoin4_txt)tjoin4_txt.FieldSchema(name:c1, type:int, comment:null), ] +POSTHOOK: Lineage: tjoin4.c2 SIMPLE [(tjoin4_txt)tjoin4_txt.FieldSchema(name:c2, type:char(2), comment:null), ] +POSTHOOK: Lineage: tjoin4.rnum SIMPLE [(tjoin4_txt)tjoin4_txt.FieldSchema(name:rnum, type:int, comment:null), ] +PREHOOK: query: explain vectorization detail formatted +select tj1rnum, tj2rnum, tjoin3.rnum as rnumt3 from + (select tjoin1_n0.rnum tj1rnum, tjoin2_n0.rnum tj2rnum, tjoin2_n0.c1 tj2c1 from tjoin1_n0 left outer join tjoin2_n0 on tjoin1_n0.c1 = tjoin2_n0.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1_n0 +PREHOOK: Input: default@tjoin2_n0 +PREHOOK: Input: default@tjoin3 +#### A masked pattern was here #### +POSTHOOK: query: explain vectorization detail formatted +select tj1rnum, tj2rnum, tjoin3.rnum as rnumt3 from + (select tjoin1_n0.rnum tj1rnum, tjoin2_n0.rnum tj2rnum, tjoin2_n0.c1 tj2c1 from tjoin1_n0 left outer join tjoin2_n0 on tjoin1_n0.c1 = tjoin2_n0.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1_n0 +POSTHOOK: 
Input: default@tjoin2_n0 +POSTHOOK: Input: default@tjoin3 +#### A masked pattern was here #### +{"CBOPlan":"{\n \"rels\": [\n {\n \"id\": \"0\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan\",\n \"table\": [\n \"default\",\n \"tjoin1_n0\"\n ],\n \"table:alias\": \"tjoin1_n0\",\n \"inputs\": [],\n \"rowCount\": 3.0,\n \"avgRowSize\": 8.0,\n \"rowType\": [\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"rnum\"\n },\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"c1\"\n },\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"c2\"\n },\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"BLOCK__OFFSET__INSIDE__FILE\"\n },\n {\n \"type\": \"VARCHAR\",\n \"nullable\": true,\n \"precision\": 2147483647,\n \"name\": \"INPUT__FILE__NAME\"\n },\n {\n \"fields\": [\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"writeid\"\n },\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"bucketid\"\n },\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"rowid\"\n }\n ],\n \"name\": \"ROW__ID\"\n }\n ],\n \"colStats\": [\n {\n \"name\": \"rnum\",\n \"ndv\": 3,\n \"minValue\": 0,\n \"maxValue\": 2\n },\n {\n \"name\": \"c1\",\n \"ndv\": 2,\n \"minValue\": 10,\n \"maxValue\": 20\n },\n {\n \"name\": \"c2\",\n \"ndv\": 3,\n \"minValue\": 15,\n \"maxValue\": 50\n }\n ]\n },\n {\n \"id\": \"1\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject\",\n \"fields\": [\n \"rnum\",\n \"c1\"\n ],\n \"exprs\": [\n {\n \"input\": 0,\n \"name\": \"$0\"\n },\n {\n \"input\": 1,\n \"name\": \"$1\"\n }\n ],\n \"rowCount\": 3.0\n },\n {\n \"id\": \"2\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan\",\n \"table\": [\n \"default\",\n \"tjoin2_n0\"\n ],\n \"table:alias\": \"tjoin2_n0\",\n \"inputs\": [],\n \"rowCount\": 4.0,\n \"avgRowSize\": 8.0,\n \"rowType\": [\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"rnum\"\n },\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"c1\"\n },\n {\n \"type\": \"CHAR\",\n \"nullable\": true,\n \"precision\": 2,\n \"name\": \"c2\"\n },\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"BLOCK__OFFSET__INSIDE__FILE\"\n },\n {\n \"type\": \"VARCHAR\",\n \"nullable\": true,\n \"precision\": 2147483647,\n \"name\": \"INPUT__FILE__NAME\"\n },\n {\n \"fields\": [\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"writeid\"\n },\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"bucketid\"\n },\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"rowid\"\n }\n ],\n \"name\": \"ROW__ID\"\n }\n ],\n \"colStats\": [\n {\n \"name\": \"rnum\",\n \"ndv\": 4,\n \"minValue\": 0,\n \"maxValue\": 3\n },\n {\n \"name\": \"c1\",\n \"ndv\": 2,\n \"minValue\": 10,\n \"maxValue\": 15\n },\n {\n \"name\": \"c2\",\n \"ndv\": 4\n }\n ]\n },\n {\n \"id\": \"3\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter\",\n \"condition\": {\n \"op\": {\n \"name\": \"IS NOT NULL\",\n \"kind\": \"IS_NOT_NULL\",\n \"syntax\": \"POSTFIX\"\n },\n \"operands\": [\n {\n \"input\": 1,\n \"name\": \"$1\"\n }\n ]\n },\n \"rowCount\": 3.0\n },\n {\n \"id\": \"4\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject\",\n \"fields\": [\n \"rnum\",\n \"c1\"\n ],\n \"exprs\": [\n {\n \"input\": 0,\n \"name\": \"$0\"\n },\n {\n \"input\": 1,\n \"name\": \"$1\"\n }\n ],\n \"rowCount\": 3.0\n 
},\n {\n \"id\": \"5\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveJoin\",\n \"condition\": {\n \"op\": {\n \"name\": \"=\",\n \"kind\": \"EQUALS\",\n \"syntax\": \"BINARY\"\n },\n \"operands\": [\n {\n \"input\": 1,\n \"name\": \"$1\"\n },\n {\n \"input\": 3,\n \"name\": \"$3\"\n }\n ]\n },\n \"joinType\": \"left\",\n \"algorithm\": \"none\",\n \"cost\": \"not available\",\n \"inputs\": [\n \"1\",\n \"4\"\n ],\n \"rowCount\": 4.5\n },\n {\n \"id\": \"6\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan\",\n \"table\": [\n \"default\",\n \"tjoin3\"\n ],\n \"table:alias\": \"tjoin3\",\n \"inputs\": [],\n \"rowCount\": 2.0,\n \"avgRowSize\": 8.0,\n \"rowType\": [\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"rnum\"\n },\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"c1\"\n },\n {\n \"type\": \"CHAR\",\n \"nullable\": true,\n \"precision\": 2,\n \"name\": \"c2\"\n },\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"BLOCK__OFFSET__INSIDE__FILE\"\n },\n {\n \"type\": \"VARCHAR\",\n \"nullable\": true,\n \"precision\": 2147483647,\n \"name\": \"INPUT__FILE__NAME\"\n },\n {\n \"fields\": [\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"writeid\"\n },\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"bucketid\"\n },\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"rowid\"\n }\n ],\n \"name\": \"ROW__ID\"\n }\n ],\n \"colStats\": [\n {\n \"name\": \"rnum\",\n \"ndv\": 2,\n \"minValue\": 0,\n \"maxValue\": 1\n },\n {\n \"name\": \"c1\",\n \"ndv\": 2,\n \"minValue\": 10,\n \"maxValue\": 15\n },\n {\n \"name\": \"c2\",\n \"ndv\": 2\n }\n ]\n },\n {\n \"id\": \"7\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter\",\n \"condition\": {\n \"op\": {\n \"name\": \"IS NOT NULL\",\n \"kind\": \"IS_NOT_NULL\",\n \"syntax\": \"POSTFIX\"\n },\n \"operands\": [\n {\n \"input\": 1,\n \"name\": \"$1\"\n }\n ]\n },\n \"rowCount\": 2.0\n },\n {\n \"id\": \"8\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject\",\n \"fields\": [\n \"rnum\",\n \"c1\"\n ],\n \"exprs\": [\n {\n \"input\": 0,\n \"name\": \"$0\"\n },\n {\n \"input\": 1,\n \"name\": \"$1\"\n }\n ],\n \"rowCount\": 2.0\n },\n {\n \"id\": \"9\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveJoin\",\n \"condition\": {\n \"op\": {\n \"name\": \"=\",\n \"kind\": \"EQUALS\",\n \"syntax\": \"BINARY\"\n },\n \"operands\": [\n {\n \"input\": 3,\n \"name\": \"$3\"\n },\n {\n \"input\": 5,\n \"name\": \"$5\"\n }\n ]\n },\n \"joinType\": \"left\",\n \"algorithm\": \"none\",\n \"cost\": \"not available\",\n \"inputs\": [\n \"5\",\n \"8\"\n ],\n \"rowCount\": 4.5\n },\n {\n \"id\": \"10\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject\",\n \"fields\": [\n \"tj1rnum\",\n \"tj2rnum\",\n \"rnumt3\"\n ],\n \"exprs\": [\n {\n \"input\": 0,\n \"name\": \"$0\"\n },\n {\n \"input\": 2,\n \"name\": \"$2\"\n },\n {\n \"input\": 4,\n \"name\": \"$4\"\n }\n ],\n \"rowCount\": 4.5\n }\n ]\n}","optimizedSQL":"SELECT `t`.`rnum` AS `tj1rnum`, `t1`.`rnum` AS `tj2rnum`, `t3`.`rnum` AS `rnumt3`\nFROM (SELECT `rnum`, `c1`\nFROM `default`.`tjoin1_n0`) AS `t`\nLEFT JOIN (SELECT `rnum`, `c1`\nFROM `default`.`tjoin2_n0`\nWHERE `c1` IS NOT NULL) AS `t1` ON `t`.`c1` = `t1`.`c1`\nLEFT JOIN (SELECT `rnum`, `c1`\nFROM `default`.`tjoin3`\nWHERE `c1` IS NOT NULL) AS `t3` ON `t1`.`c1` = 
`t3`.`c1`","PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"cboInfo":"Plan optimized by CBO.","STAGE DEPENDENCIES":{"Stage-7":{"ROOT STAGE":"TRUE"},"Stage-5":{"DEPENDENT STAGES":"Stage-7"},"Stage-0":{"DEPENDENT STAGES":"Stage-5"}},"STAGE PLANS":{"Stage-7":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:tjoin2_n0":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:tjoin3":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:tjoin2_n0":{"TableScan":{"alias:":"tjoin2_n0","columns:":["rnum","c1"],"database:":"default","filterExpr:":"c1 is not null (type: boolean)","Statistics:":"Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE","table:":"tjoin2_n0","isTempTable:":"false","OperatorId:":"TS_2","children":{"Filter Operator":{"predicate:":"c1 is not null (type: boolean)","Statistics:":"Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"FIL_16","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"SEL_4","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}},"$hdt$_2:tjoin3":{"TableScan":{"alias:":"tjoin3","columns:":["rnum","c1"],"database:":"default","filterExpr:":"c1 is not null (type: boolean)","Statistics:":"Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE","table:":"tjoin3","isTempTable:":"false","OperatorId:":"TS_5","children":{"Filter Operator":{"predicate:":"c1 is not null (type: boolean)","Statistics:":"Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"FIL_17","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"SEL_7","children":{"HashTable Sink Operator":{"keys:":{"0":"_col3 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_22"}}}}}}}}}}},"Stage-5":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"tjoin1_n0","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE","table:":"tjoin1_n0","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1]"},"Statistics:":"Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"SEL_26","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col2":"1:_col0","_col3":"1:_col1"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable 
IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col2","_col3"],"Statistics:":"Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"MAPJOIN_27","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col2":"0:_col2","_col4":"1:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col3 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:int","col 1:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col2","_col4"],"Statistics:":"Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"MAPJOIN_28","children":{"Select Operator":{"expressions:":"_col0 (type: int), _col2 (type: int), _col4 (type: int)","columnExprMap:":{"_col0":"_col0","_col1":"_col2","_col2":"_col4"},"outputColumnNames:":["_col0","_col1","_col2"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1, 2]"},"Statistics:":"Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"SEL_29","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_30"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"3","includeColumns:":"[0, 1]","dataColumns:":["rnum:int","c1:int","c2:int"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_31"}}}}}} +PREHOOK: query: select tj1rnum, tj2rnum, tjoin3.rnum as rnumt3 from + (select tjoin1_n0.rnum tj1rnum, tjoin2_n0.rnum tj2rnum, tjoin2_n0.c1 tj2c1 from tjoin1_n0 left outer join tjoin2_n0 on tjoin1_n0.c1 = tjoin2_n0.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1_n0 +PREHOOK: Input: default@tjoin2_n0 +PREHOOK: Input: default@tjoin3 +#### A masked pattern was here #### +POSTHOOK: query: select tj1rnum, tj2rnum, tjoin3.rnum as rnumt3 from + 
(select tjoin1_n0.rnum tj1rnum, tjoin2_n0.rnum tj2rnum, tjoin2_n0.c1 tj2c1 from tjoin1_n0 left outer join tjoin2_n0 on tjoin1_n0.c1 = tjoin2_n0.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1_n0 +POSTHOOK: Input: default@tjoin2_n0 +POSTHOOK: Input: default@tjoin3 +#### A masked pattern was here #### +0 0 0 +0 3 0 +1 NULL NULL +2 NULL NULL +PREHOOK: query: explain vectorization detail formatted +select tj1rnum, tj2rnum as rnumt3 from + (select tjoin1_n0.rnum tj1rnum, tjoin2_n0.rnum tj2rnum, tjoin2_n0.c1 tj2c1 from tjoin1_n0 left outer join tjoin2_n0 on tjoin1_n0.c1 = tjoin2_n0.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1_n0 +PREHOOK: Input: default@tjoin2_n0 +PREHOOK: Input: default@tjoin3 +#### A masked pattern was here #### +POSTHOOK: query: explain vectorization detail formatted +select tj1rnum, tj2rnum as rnumt3 from + (select tjoin1_n0.rnum tj1rnum, tjoin2_n0.rnum tj2rnum, tjoin2_n0.c1 tj2c1 from tjoin1_n0 left outer join tjoin2_n0 on tjoin1_n0.c1 = tjoin2_n0.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1_n0 +POSTHOOK: Input: default@tjoin2_n0 +POSTHOOK: Input: default@tjoin3 +#### A masked pattern was here #### +{"CBOPlan":"{\n \"rels\": [\n {\n \"id\": \"0\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan\",\n \"table\": [\n \"default\",\n \"tjoin1_n0\"\n ],\n \"table:alias\": \"tjoin1_n0\",\n \"inputs\": [],\n \"rowCount\": 3.0,\n \"avgRowSize\": 8.0,\n \"rowType\": [\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"rnum\"\n },\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"c1\"\n },\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"c2\"\n },\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"BLOCK__OFFSET__INSIDE__FILE\"\n },\n {\n \"type\": \"VARCHAR\",\n \"nullable\": true,\n \"precision\": 2147483647,\n \"name\": \"INPUT__FILE__NAME\"\n },\n {\n \"fields\": [\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"writeid\"\n },\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"bucketid\"\n },\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"rowid\"\n }\n ],\n \"name\": \"ROW__ID\"\n }\n ],\n \"colStats\": [\n {\n \"name\": \"rnum\",\n \"ndv\": 3,\n \"minValue\": 0,\n \"maxValue\": 2\n },\n {\n \"name\": \"c1\",\n \"ndv\": 2,\n \"minValue\": 10,\n \"maxValue\": 20\n },\n {\n \"name\": \"c2\",\n \"ndv\": 3,\n \"minValue\": 15,\n \"maxValue\": 50\n }\n ]\n },\n {\n \"id\": \"1\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject\",\n \"fields\": [\n \"rnum\",\n \"c1\"\n ],\n \"exprs\": [\n {\n \"input\": 0,\n \"name\": \"$0\"\n },\n {\n \"input\": 1,\n \"name\": \"$1\"\n }\n ],\n \"rowCount\": 3.0\n },\n {\n \"id\": \"2\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan\",\n \"table\": [\n \"default\",\n \"tjoin2_n0\"\n ],\n \"table:alias\": \"tjoin2_n0\",\n \"inputs\": [],\n \"rowCount\": 4.0,\n \"avgRowSize\": 8.0,\n \"rowType\": [\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"rnum\"\n },\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"c1\"\n },\n {\n \"type\": \"CHAR\",\n \"nullable\": true,\n \"precision\": 2,\n \"name\": \"c2\"\n },\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"BLOCK__OFFSET__INSIDE__FILE\"\n },\n {\n \"type\": 
\"VARCHAR\",\n \"nullable\": true,\n \"precision\": 2147483647,\n \"name\": \"INPUT__FILE__NAME\"\n },\n {\n \"fields\": [\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"writeid\"\n },\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"bucketid\"\n },\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"rowid\"\n }\n ],\n \"name\": \"ROW__ID\"\n }\n ],\n \"colStats\": [\n {\n \"name\": \"rnum\",\n \"ndv\": 4,\n \"minValue\": 0,\n \"maxValue\": 3\n },\n {\n \"name\": \"c1\",\n \"ndv\": 2,\n \"minValue\": 10,\n \"maxValue\": 15\n },\n {\n \"name\": \"c2\",\n \"ndv\": 4\n }\n ]\n },\n {\n \"id\": \"3\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter\",\n \"condition\": {\n \"op\": {\n \"name\": \"IS NOT NULL\",\n \"kind\": \"IS_NOT_NULL\",\n \"syntax\": \"POSTFIX\"\n },\n \"operands\": [\n {\n \"input\": 1,\n \"name\": \"$1\"\n }\n ]\n },\n \"rowCount\": 3.0\n },\n {\n \"id\": \"4\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject\",\n \"fields\": [\n \"rnum\",\n \"c1\"\n ],\n \"exprs\": [\n {\n \"input\": 0,\n \"name\": \"$0\"\n },\n {\n \"input\": 1,\n \"name\": \"$1\"\n }\n ],\n \"rowCount\": 3.0\n },\n {\n \"id\": \"5\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveJoin\",\n \"condition\": {\n \"op\": {\n \"name\": \"=\",\n \"kind\": \"EQUALS\",\n \"syntax\": \"BINARY\"\n },\n \"operands\": [\n {\n \"input\": 1,\n \"name\": \"$1\"\n },\n {\n \"input\": 3,\n \"name\": \"$3\"\n }\n ]\n },\n \"joinType\": \"left\",\n \"algorithm\": \"none\",\n \"cost\": \"not available\",\n \"inputs\": [\n \"1\",\n \"4\"\n ],\n \"rowCount\": 4.5\n },\n {\n \"id\": \"6\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan\",\n \"table\": [\n \"default\",\n \"tjoin3\"\n ],\n \"table:alias\": \"tjoin3\",\n \"inputs\": [],\n \"rowCount\": 2.0,\n \"avgRowSize\": 4.0,\n \"rowType\": [\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"rnum\"\n },\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"c1\"\n },\n {\n \"type\": \"CHAR\",\n \"nullable\": true,\n \"precision\": 2,\n \"name\": \"c2\"\n },\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"BLOCK__OFFSET__INSIDE__FILE\"\n },\n {\n \"type\": \"VARCHAR\",\n \"nullable\": true,\n \"precision\": 2147483647,\n \"name\": \"INPUT__FILE__NAME\"\n },\n {\n \"fields\": [\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"writeid\"\n },\n {\n \"type\": \"INTEGER\",\n \"nullable\": true,\n \"name\": \"bucketid\"\n },\n {\n \"type\": \"BIGINT\",\n \"nullable\": true,\n \"name\": \"rowid\"\n }\n ],\n \"name\": \"ROW__ID\"\n }\n ],\n \"colStats\": [\n {\n \"name\": \"c1\",\n \"ndv\": 2,\n \"minValue\": 10,\n \"maxValue\": 15\n },\n {\n \"name\": \"rnum\",\n \"ndv\": 2,\n \"minValue\": 0,\n \"maxValue\": 1\n },\n {\n \"name\": \"c2\",\n \"ndv\": 2\n }\n ]\n },\n {\n \"id\": \"7\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter\",\n \"condition\": {\n \"op\": {\n \"name\": \"IS NOT NULL\",\n \"kind\": \"IS_NOT_NULL\",\n \"syntax\": \"POSTFIX\"\n },\n \"operands\": [\n {\n \"input\": 1,\n \"name\": \"$1\"\n }\n ]\n },\n \"rowCount\": 2.0\n },\n {\n \"id\": \"8\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject\",\n \"fields\": [\n \"c1\"\n ],\n \"exprs\": [\n {\n \"input\": 1,\n \"name\": \"$1\"\n }\n ],\n \"rowCount\": 2.0\n },\n {\n \"id\": \"9\",\n \"relOp\": 
\"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveJoin\",\n \"condition\": {\n \"op\": {\n \"name\": \"=\",\n \"kind\": \"EQUALS\",\n \"syntax\": \"BINARY\"\n },\n \"operands\": [\n {\n \"input\": 3,\n \"name\": \"$3\"\n },\n {\n \"input\": 4,\n \"name\": \"$4\"\n }\n ]\n },\n \"joinType\": \"left\",\n \"algorithm\": \"none\",\n \"cost\": \"not available\",\n \"inputs\": [\n \"5\",\n \"8\"\n ],\n \"rowCount\": 4.5\n },\n {\n \"id\": \"10\",\n \"relOp\": \"org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject\",\n \"fields\": [\n \"tj1rnum\",\n \"rnumt3\"\n ],\n \"exprs\": [\n {\n \"input\": 0,\n \"name\": \"$0\"\n },\n {\n \"input\": 2,\n \"name\": \"$2\"\n }\n ],\n \"rowCount\": 4.5\n }\n ]\n}","optimizedSQL":"SELECT `t`.`rnum` AS `tj1rnum`, `t1`.`rnum` AS `rnumt3`\nFROM (SELECT `rnum`, `c1`\nFROM `default`.`tjoin1_n0`) AS `t`\nLEFT JOIN (SELECT `rnum`, `c1`\nFROM `default`.`tjoin2_n0`\nWHERE `c1` IS NOT NULL) AS `t1` ON `t`.`c1` = `t1`.`c1`\nLEFT JOIN (SELECT `c1`\nFROM `default`.`tjoin3`\nWHERE `c1` IS NOT NULL) AS `t3` ON `t1`.`c1` = `t3`.`c1`","PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"cboInfo":"Plan optimized by CBO.","STAGE DEPENDENCIES":{"Stage-7":{"ROOT STAGE":"TRUE"},"Stage-5":{"DEPENDENT STAGES":"Stage-7"},"Stage-0":{"DEPENDENT STAGES":"Stage-5"}},"STAGE PLANS":{"Stage-7":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:tjoin2_n0":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:tjoin3":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:tjoin2_n0":{"TableScan":{"alias:":"tjoin2_n0","columns:":["rnum","c1"],"database:":"default","filterExpr:":"c1 is not null (type: boolean)","Statistics:":"Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE","table:":"tjoin2_n0","isTempTable:":"false","OperatorId:":"TS_2","children":{"Filter Operator":{"predicate:":"c1 is not null (type: boolean)","Statistics:":"Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"FIL_16","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"SEL_4","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}},"$hdt$_2:tjoin3":{"TableScan":{"alias:":"tjoin3","columns:":["c1"],"database:":"default","filterExpr:":"c1 is not null (type: boolean)","Statistics:":"Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","table:":"tjoin3","isTempTable:":"false","OperatorId:":"TS_5","children":{"Filter Operator":{"predicate:":"c1 is not null (type: boolean)","Statistics:":"Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"FIL_17","children":{"Select Operator":{"expressions:":"c1 (type: int)","columnExprMap:":{"_col0":"c1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"SEL_7","children":{"HashTable Sink Operator":{"keys:":{"0":"_col3 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_22"}}}}}}}}}}},"Stage-5":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"tjoin1_n0","columns:":["rnum","c1"],"database:":"default","Statistics:":"Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: 
COMPLETE","table:":"tjoin1_n0","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"rnum (type: int), c1 (type: int)","columnExprMap:":{"_col0":"rnum","_col1":"c1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1]"},"Statistics:":"Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"SEL_26","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col2":"1:_col0","_col3":"1:_col1"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col1 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 1:int"],"bigTableValueExpressions:":["col 0:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col2","_col3"],"Statistics:":"Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"MAPJOIN_27","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col2":"0:_col2"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col3 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:int","col 1:int"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col2"],"Statistics:":"Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"MAPJOIN_28","children":{"Select Operator":{"expressions:":"_col0 (type: int), _col2 (type: int)","columnExprMap:":{"_col0":"_col0","_col1":"_col2"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1]"},"Statistics:":"Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE","OperatorId:":"SEL_29","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_30"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS 
true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"3","includeColumns:":"[0, 1]","dataColumns:":["rnum:int","c1:int","c2:int"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_31"}}}}}} +PREHOOK: query: select tj1rnum, tj2rnum as rnumt3 from + (select tjoin1_n0.rnum tj1rnum, tjoin2_n0.rnum tj2rnum, tjoin2_n0.c1 tj2c1 from tjoin1_n0 left outer join tjoin2_n0 on tjoin1_n0.c1 = tjoin2_n0.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1_n0 +PREHOOK: Input: default@tjoin2_n0 +PREHOOK: Input: default@tjoin3 +#### A masked pattern was here #### +POSTHOOK: query: select tj1rnum, tj2rnum as rnumt3 from + (select tjoin1_n0.rnum tj1rnum, tjoin2_n0.rnum tj2rnum, tjoin2_n0.c1 tj2c1 from tjoin1_n0 left outer join tjoin2_n0 on tjoin1_n0.c1 = tjoin2_n0.c1 ) tj left outer join tjoin3 on tj2c1 = tjoin3.c1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1_n0 +POSTHOOK: Input: default@tjoin2_n0 +POSTHOOK: Input: default@tjoin3 +#### A masked pattern was here #### +0 0 +0 3 +1 NULL +2 NULL diff --git ql/src/test/results/clientpositive/vector_tablesample_rows.q.out ql/src/test/results/clientpositive/vector_tablesample_rows.q.out index 953e0934c5..7eed938f56 100644 --- ql/src/test/results/clientpositive/vector_tablesample_rows.q.out +++ ql/src/test/results/clientpositive/vector_tablesample_rows.q.out @@ -387,11 +387,11 @@ STAGE PLANS: Stage: Stage-8 Create Table columns: _c0 int - name: default.dual input format: org.apache.hadoop.mapred.TextInputFormat #### A masked pattern was here #### output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.dual isTemporary: true Stage: Stage-2 diff --git ql/src/test/results/clientpositive/windowing_streaming.q.out ql/src/test/results/clientpositive/windowing_streaming.q.out index ec24ce4dfb..15a739a48b 100644 --- ql/src/test/results/clientpositive/windowing_streaming.q.out +++ ql/src/test/results/clientpositive/windowing_streaming.q.out @@ -412,13 +412,13 @@ select * from (select ctinyint, cdouble, rank() over(partition by ctinyint order PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@alltypesorc PREHOOK: Output: database:default -PREHOOK: Output: default@sB_n0 +PREHOOK: Output: default@sb_n0 POSTHOOK: query: create table sB_n0 ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE as select * from (select ctinyint, cdouble, rank() over(partition by ctinyint order by cdouble) r from alltypesorc) a where r < 5 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@alltypesorc POSTHOOK: Output: database:default -POSTHOOK: Output: default@sB_n0 +POSTHOOK: Output: default@sb_n0 POSTHOOK: Lineage: sb_n0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: sb_n0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] POSTHOOK: Lineage: sb_n0.r SCRIPT [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, 
comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] @@ -445,13 +445,13 @@ select * from (select ctinyint, cdouble, rank() over(partition by ctinyint order PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@alltypesorc PREHOOK: Output: database:default -PREHOOK: Output: default@sD_n0 +PREHOOK: Output: default@sd_n0 POSTHOOK: query: create table sD_n0 ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE as select * from (select ctinyint, cdouble, rank() over(partition by ctinyint order by cdouble) r from alltypesorc) a where r < 5 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@alltypesorc POSTHOOK: Output: database:default -POSTHOOK: Output: default@sD_n0 +POSTHOOK: Output: default@sd_n0 POSTHOOK: Lineage: sd_n0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: sd_n0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] POSTHOOK: Lineage: sd_n0.r SCRIPT [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), (alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] diff --git standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index 205c867db1..60018a63d5 100644 --- standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -57,6 +57,7 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.ValidTxnList; import 
org.apache.hadoop.hive.common.ValidWriteIdList; import org.apache.hadoop.hive.metastore.api.*; @@ -2107,6 +2108,10 @@ public Table getTable(String catName, String dbName, String tableName, String va return deepCopy(FilterUtils.filterTableIfEnabled(isClientFilterEnabled, filterHook, t)); } + @Override public Table getTable(TableName tableName) throws MetaException, TException, NoSuchObjectException { + return getTable(tableName.getCat(), tableName.getDb(), tableName.getTable()); + } + @Override public List<Table> getTableObjectsByName(String dbName, List<String> tableNames) throws TException { diff --git standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java index b58b1e4a07..ae8c123c3c 100644 --- standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java +++ standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java @@ -29,6 +29,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidWriteIdList; import org.apache.hadoop.hive.common.classification.RetrySemantics; @@ -681,6 +682,22 @@ Table getTable(String dbName, String tableName, boolean getColumnStats, String e */ Table getTable(String catName, String dbName, String tableName) throws MetaException, TException; + /** + * Get a table object, identified by the catalog, database and table parts of the given name. + * + * @param tableName + * The {@link org.apache.hadoop.hive.common.TableName} to fetch. + * @return An object representing the table. + * @throws MetaException + * Could not fetch the table + * @throws TException + * A thrift communication error occurred + * @throws NoSuchObjectException + * In case the table wasn't found. + */ + Table getTable(TableName tableName) throws MetaException, + TException, NoSuchObjectException; + /** * Get a table object. * @param catName catalog the table is in. diff --git standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java index 7092ee869a..2c2c3e7a65 100755 --- standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java +++ standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java @@ -356,7 +356,7 @@ public static String getQualifiedName(Partition partition) { * @param table table object * @return fully qualified name. 
*/ - public static String getCatalogQualifiedTableName(Table table) { + public static String getCatalogQualifiedTableName(Table table) { // TODO: deprecate/remove return TableName.getQualified(table.getCatName(), table.getDbName(), table.getTableName()); } diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/Msck.java standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/Msck.java index f4e109d1b0..b0dfbe6744 100644 --- standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/Msck.java +++ standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/Msck.java @@ -110,15 +110,15 @@ public int repair(MsckInfo msckInfo) { int ret = 0; long partitionExpirySeconds = msckInfo.getPartitionExpirySeconds(); try { - Table table = getMsc().getTable(msckInfo.getCatalogName(), msckInfo.getDbName(), msckInfo.getTableName()); + Table table = getMsc().getTable(msckInfo.getTableName()); qualifiedTableName = Warehouse.getCatalogQualifiedTableName(table); HiveMetaStoreChecker checker = new HiveMetaStoreChecker(getMsc(), getConf(), partitionExpirySeconds); // checkMetastore call will fill in result with partitions that are present in filesystem // and missing in metastore - accessed through getPartitionsNotInMs // And partitions that are not present in filesystem and metadata exists in metastore - // accessed through getPartitionNotOnFS - checker.checkMetastore(msckInfo.getCatalogName(), msckInfo.getDbName(), msckInfo.getTableName(), - msckInfo.getPartSpecs(), result); + checker.checkMetastore(msckInfo.getTableName().getCat(), msckInfo.getTableName().getDb(), + msckInfo.getTableName().getTable(), msckInfo.getPartSpecs(), result); Set<CheckResult.PartitionResult> partsNotInMs = result.getPartitionsNotInMs(); Set<CheckResult.PartitionResult> partsNotInFs = result.getPartitionsNotOnFs(); Set<CheckResult.PartitionResult> expiredPartitions = result.getExpiredPartitions(); @@ -138,7 +138,8 @@ public int repair(MsckInfo msckInfo) { MetaStoreServerUtils.isTransactionalTable(table.getParameters())) { // Running MSCK from beeline/cli will make DDL task acquire X lock when repair is enabled, since we are directly // invoking msck.repair() without SQL statement, we need to do the same and acquire X lock (repair is default) - LockRequest lockRequest = createLockRequest(msckInfo.getDbName(), msckInfo.getTableName()); + LockRequest lockRequest = + createLockRequest(msckInfo.getTableName().getDb(), msckInfo.getTableName().getTable()); txnId = lockRequest.getTxnid(); try { LockResponse res = getMsc().lock(lockRequest); diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MsckInfo.java standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MsckInfo.java index 25d0c648ae..9c28133c93 100644 --- standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MsckInfo.java +++ standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MsckInfo.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hive.metastore; +import org.apache.hadoop.hive.common.TableName; + import java.util.List; import java.util.Map; @@ -25,9 +27,7 @@ */ public class MsckInfo { - private final String catalogName; - private final String dbName; - private final String tableName; + private final TableName tableName; private final List<Map<String, String>> partSpecs; private final String resFile; private final boolean repairPartitions; @@ -35,11 +35,9 @@ private final boolean dropPartitions; private final long 
partitionExpirySeconds; - public MsckInfo(String catalogName, String dbName, String tableName, List<Map<String, String>> partSpecs, + public MsckInfo(TableName tableName, List<Map<String, String>> partSpecs, String resFile, boolean repairPartitions, boolean addPartitions, boolean dropPartitions, long partitionExpirySeconds) { - this.catalogName = catalogName; - this.dbName = dbName; this.tableName = tableName; this.partSpecs = partSpecs; this.resFile = resFile; @@ -49,15 +47,7 @@ public MsckInfo(String catalogName, String dbName, String tableName, List<Table> getTableObjectsByName(String catName, String dbName, List<String> tableNames) throws MetaException, diff --git storage-api/src/java/org/apache/hadoop/hive/common/TableName.java storage-api/src/java/org/apache/hadoop/hive/common/TableName.java index a0f47c2ebf..fcfb6673de 100644 --- storage-api/src/java/org/apache/hadoop/hive/common/TableName.java +++ storage-api/src/java/org/apache/hadoop/hive/common/TableName.java @@ -36,8 +36,11 @@ private final String cat; private final String db; private final String table; + private final boolean catIsBlank; + private final boolean dbIsBlank; /** + * Creates a TableName object, after applying lowercase to all params. * * @param catName catalog name. Cannot be null. If you do not know it you can get it from * SessionState.getCurrentCatalog() if you want to use the catalog from the current * @param tableName table name, cannot be null */ public TableName(final String catName, final String dbName, final String tableName) { - this.cat = catName; - this.db = dbName; - this.table = tableName; + if (tableName == null || tableName.isEmpty()) { + throw new IllegalArgumentException(String.join("", "Table value was blank. ", ILL_ARG_EXCEPTION_MSG)); + } + + this.dbIsBlank = dbName == null || dbName.isEmpty(); + this.catIsBlank = catName == null || catName.isEmpty(); + if (dbIsBlank && !catIsBlank) { + throw new IllegalArgumentException( + String.join("", "Invalid values: database was blank, while catalog wasn't. ", ILL_ARG_EXCEPTION_MSG)); + } + + this.cat = this.catIsBlank ? catName : catName.toLowerCase(); + this.db = this.dbIsBlank ? dbName : dbName.toLowerCase(); + this.table = tableName.toLowerCase(); + } + + /** + * Build a TableName from a string of the form [database.]table. + * @param name name in string form, not null, of the form [database.]table. + * @param defaultDatabase default database to use if database is not in the name. If you do + * not know it you can get it from SessionState.getCurrentDatabase() or + * use Warehouse.DEFAULT_DATABASE_NAME. + * @return TableName + * @throws IllegalArgumentException if a null name is given + */ + public static TableName fromString(final String name, final String defaultDatabase) + throws IllegalArgumentException { + return fromString(name, null, defaultDatabase); } /** * Build a TableName from a string of the form [[catalog.]database.]table. - * @param name name in string form, not null + * @param name name in string form, not null, of the form [[catalog.]database.]table. * @param defaultCatalog default catalog to use if catalog is not in the name. If you do not * know it you can get it from SessionState.getCurrentCatalog() if you * want to use the catalog from the current session, or from @@ -102,9 +130,9 @@ public String getTable() { /** * Get the name in db.table format, for use with stuff not yet converted to use the catalog. * Fair warning, that if the db is null, this will return null.tableName - * @deprecated use {@link #getNotEmptyDbTable()} instead. 
+ * @deprecated use {@link #toString()} instead. */ - // to be @Deprecated + // todo: remove, refactor public String getDbTable() { return db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; } @@ -114,24 +142,27 @@ public String getDbTable() { */ public String getEscapedNotEmptyDbTable() { return - db == null || db.trim().isEmpty() ? + dbIsBlank ? "`" + table + "`" : "`" + db + "`" + DatabaseName.CAT_DB_TABLE_SEPARATOR + "`" + table + "`"; } /** * Get the name in db.table format, if db is not empty, otherwise return only the table name. */ + @Deprecated public String getNotEmptyDbTable() { - return db == null || db.trim().isEmpty() ? table : db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; + return db == null || db.isEmpty() ? table : db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; } /** * Get the name in db.table format, for use with stuff not yet converted to use the catalog. */ + // todo: this can be quite unsafe public static String getDbTable(String dbName, String tableName) { return dbName + DatabaseName.CAT_DB_TABLE_SEPARATOR + tableName; } + // todo: this can be quite unsafe public static String getQualified(String catName, String dbName, String tableName) { return catName + DatabaseName.CAT_DB_TABLE_SEPARATOR + dbName + DatabaseName.CAT_DB_TABLE_SEPARATOR + tableName; } @@ -154,6 +185,10 @@ public static String getQualified(String catName, String dbName, String tableNam @Override public String toString() { - return cat + DatabaseName.CAT_DB_TABLE_SEPARATOR + db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; + if (catIsBlank) { + return dbIsBlank ? table : db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; + } else { + return cat + DatabaseName.CAT_DB_TABLE_SEPARATOR + db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; + } } } diff --git storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java index f19c7358c9..872fb900f8 100644 --- storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java +++ storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java @@ -21,14 +21,51 @@ import org.junit.Test; public class TestTableName { + @Test - public void fullName() { - TableName name = new TableName("cat", "db", "t"); + public void testFullName() { + TableName name = new TableName("CaT", "dB", "TbL"); Assert.assertEquals("cat", name.getCat()); Assert.assertEquals("db", name.getDb()); - Assert.assertEquals("t", name.getTable()); - Assert.assertEquals("cat.db.t", name.toString()); - Assert.assertEquals("db.t", name.getDbTable()); + Assert.assertEquals("tbl", name.getTable()); + Assert.assertEquals("cat.db.tbl", name.toString()); + Assert.assertEquals("db.tbl", name.getDbTable()); + } + + @Test + public void testPartialName() { + TableName name = new TableName(null, "db", "t"); + Assert.assertEquals("db.t", name.toString()); + + name = new TableName(null, null, "t"); + Assert.assertEquals("t", name.toString()); + } + + @Test + public void testIllegalNames() { + try { + new TableName("cat", null, "t"); + Assert.fail(); + } catch (IllegalArgumentException e) { + } + + try { + new TableName("cat", "", "t"); + Assert.fail(); + } catch (IllegalArgumentException e) { + } + + try { + new TableName("cat", "db", null); + Assert.fail(); + } catch (IllegalArgumentException e) { + } + + try { + new TableName("cat", "db", ""); + Assert.fail(); + } catch (IllegalArgumentException e) { + } } @Test @@ -55,16 +92,4 @@ public void fromString() { Assert.assertTrue(true); } } - - @Test - public void 
testNotEmptyDbTable() { - TableName name = new TableName("cat", "db", "t"); - Assert.assertEquals("db.t", name.getNotEmptyDbTable()); - - name = new TableName("cat", null, "t"); - Assert.assertEquals("t", name.getNotEmptyDbTable()); - - name = new TableName("cat", "", "t"); - Assert.assertEquals("t", name.getNotEmptyDbTable()); - } }
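
Taken together, the TableName hunks above define a normalization contract: the constructor lower-cases every part, permits a blank catalog (and then a blank database), and rejects a blank table name or a blank database paired with a non-blank catalog. Below is a minimal sketch, not part of the patch, that exercises these rules; it assumes the patched storage-api jar is on the classpath, and the class name TableNameDemo is ours. The expected outputs mirror the assertions in TestTableName.

import org.apache.hadoop.hive.common.TableName;

// Sketch only (assumed demo class, not from the patch): shows the
// normalization and validation behavior of the new TableName constructor.
public class TableNameDemo {
  public static void main(String[] args) {
    // All parts are lower-cased by the constructor.
    TableName full = new TableName("CaT", "dB", "TbL");
    System.out.println(full);              // cat.db.tbl
    System.out.println(full.getDbTable()); // db.tbl (deprecated; prefer toString())

    // Blank catalog (and database) are allowed; toString() omits them.
    System.out.println(new TableName(null, "db", "t")); // db.t
    System.out.println(new TableName(null, null, "t")); // t

    // A blank table, or a blank database with a non-blank catalog, is rejected.
    try {
      new TableName("cat", null, "t");
    } catch (IllegalArgumentException expected) {
      System.out.println("rejected: " + expected.getMessage());
    }
  }
}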
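
On the client side the change is a thin convenience: the new IMetaStoreClient.getTable(TableName) overload unpacks the name and delegates to the three-argument getTable, as the HiveMetaStoreClient hunk shows, which is what lets Msck.repair() carry a single TableName instead of three separate strings. A usage sketch follows, under stated assumptions: the helper class FetchByTableName and the "hive"/"default" fallback values are ours, not from the patch.

import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.thrift.TException;

// Sketch only: resolves a possibly partial name string and fetches the table
// through the new single-argument overload, which delegates to
// getTable(cat, db, table) internally.
public final class FetchByTableName {
  static Table fetch(IMetaStoreClient client, String qualified) throws TException {
    // fromString fills in the defaults when catalog/database are omitted;
    // "hive" and "default" are assumed defaults here.
    TableName name = TableName.fromString(qualified, "hive", "default");
    return client.getTable(name);
  }
}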