Index: eclipse-templates/TestHiveServer.launchtemplate
===================================================================
--- eclipse-templates/TestHiveServer.launchtemplate (revision 0)
+++ eclipse-templates/TestHiveServer.launchtemplate (revision 0)
@@ -0,0 +1,23 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: hwi/build.xml
===================================================================
--- hwi/build.xml (revision 770442)
+++ hwi/build.xml (working copy)
@@ -11,6 +11,17 @@
+
+
+
+
+
+
+
+
+
+
+
   <description>Location of the structured hive logs</description>
+<property>
+  <name>hive.exec.pre.hooks</name>
+  <value>org.apache.hadoop.hive.ql.hooks.PreExecutePrinter</value>
+  <description>Pre Execute Hook for Tests</description>
+</property>
+
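For context on the hunks that follow: hive.exec.pre.hooks takes a comma-separated list of hook classes that run before each query, and PreExecutePrinter echoes the query text plus its read and write entities, which is where the added "query:", "Input:" and "Output:" lines in the .q.out files below come from. A minimal sketch of such a hook is given here for illustration only; the org.apache.hadoop.hive.ql.hooks.PreExecute interface and the exact run(...) signature are assumed, not taken from this patch, and QueryEchoHook is a hypothetical class name.

// Illustration only: a hook in the spirit of PreExecutePrinter. The PreExecute
// interface and its run(...) signature are assumed here, not part of this patch.
package org.apache.hadoop.hive.ql.hooks;

import java.util.Set;

import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.security.UserGroupInformation;

public class QueryEchoHook implements PreExecute {

  public void run(SessionState sess, Set<ReadEntity> inputs,
      Set<WriteEntity> outputs, UserGroupInformation ugi) throws Exception {
    // Echo the query and the entities it reads and writes, mirroring the
    // "query:", "Input:" and "Output:" lines in the expected outputs below.
    System.out.println("query: " + sess.getCmd());
    for (ReadEntity input : inputs) {
      System.out.println("Input: " + input);
    }
    for (WriteEntity output : outputs) {
      System.out.println("Output: " + output);
    }
  }
}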
Index: ql/src/test/results/clientnegative/script_error.q.out
===================================================================
--- ql/src/test/results/clientnegative/script_error.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/script_error.q.out (working copy)
@@ -1,3 +1,6 @@
+query: EXPLAIN
+SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
+FROM src
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) '../data/scripts/error_script' (TOK_ALIASLIST tkey tvalue))))))
@@ -33,4 +36,8 @@
limit: -1
+query: SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
+FROM src
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/35582581/85175665.10000
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
Index: ql/src/test/results/clientnegative/notable_alias3.q.out
===================================================================
--- ql/src/test/results/clientnegative/notable_alias3.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/notable_alias3.q.out (working copy)
@@ -1 +1,2 @@
+query: CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE
FAILED: Error in semantic analysis: line 4:44 Expression Not In Group By Key src
Index: ql/src/test/results/clientnegative/bad_sample_clause.q.out
===================================================================
--- ql/src/test/results/clientnegative/bad_sample_clause.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/bad_sample_clause.q.out (working copy)
@@ -1 +1,2 @@
+query: CREATE TABLE dest1(key INT, value STRING, dt STRING, hr STRING) STORED AS TEXTFILE
FAILED: Error in semantic analysis: Sampling Expression Needed for Non-Bucketed Table srcpart
Index: ql/src/test/results/clientnegative/bad_exec_hooks.q.out
===================================================================
--- ql/src/test/results/clientnegative/bad_exec_hooks.q.out (revision 0)
+++ ql/src/test/results/clientnegative/bad_exec_hooks.q.out (revision 0)
@@ -0,0 +1,2 @@
+Pre Exec Hook Class not found:"org.this.is.a.bad.class"
+FAILED: Unknown exception : "org.this.is.a.bad.class"
Index: ql/src/test/results/clientnegative/load_wrong_fileformat.q.out
===================================================================
--- ql/src/test/results/clientnegative/load_wrong_fileformat.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/load_wrong_fileformat.q.out (working copy)
@@ -1,2 +1,5 @@
+query: DROP TABLE T1
+query: CREATE TABLE T1(name STRING) STORED AS SEQUENCEFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
Failed with exception Cannot load text files into a table stored as SequenceFile.
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MoveTask
Index: ql/src/test/results/clientnegative/describe_xpath1.q.out
===================================================================
--- ql/src/test/results/clientnegative/describe_xpath1.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/describe_xpath1.q.out (working copy)
@@ -1,2 +1,3 @@
+query: describe src_thrift.$elem$
FAILED: Error in metadata: java.lang.RuntimeException: cannot find field $elem$ from [public int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, public java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, public java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString]
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Index: ql/src/test/results/clientnegative/clusterbydistributeby.q.out
===================================================================
--- ql/src/test/results/clientnegative/clusterbydistributeby.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/clusterbydistributeby.q.out (working copy)
@@ -1 +1,2 @@
+query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
FAILED: Error in semantic analysis: line 8:14 Cannot have both Cluster By and Distribute By Clauses tkey
Index: ql/src/test/results/clientnegative/altern1.q.out
===================================================================
--- ql/src/test/results/clientnegative/altern1.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/altern1.q.out (working copy)
@@ -1,2 +1,5 @@
+query: drop table altern1
+query: create table altern1(a int, b int) partitioned by (ds string)
+query: alter table altern1 replace columns(a int, b int, ds string)
Invalid table columns : Partition column name ds conflicts with table columns.
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Index: ql/src/test/results/clientnegative/describe_xpath2.q.out
===================================================================
--- ql/src/test/results/clientnegative/describe_xpath2.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/describe_xpath2.q.out (working copy)
@@ -1,2 +1,3 @@
+query: describe src_thrift.$key$
FAILED: Error in metadata: java.lang.RuntimeException: cannot find field $key$ from [public int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, public java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, public java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString]
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Index: ql/src/test/results/clientnegative/describe_xpath3.q.out
===================================================================
--- ql/src/test/results/clientnegative/describe_xpath3.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/describe_xpath3.q.out (working copy)
@@ -1,2 +1,3 @@
+query: describe src_thrift.lint.abc
FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Error in getting fields from serde.Unknown type for abc
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Index: ql/src/test/results/clientnegative/describe_xpath4.q.out
===================================================================
--- ql/src/test/results/clientnegative/describe_xpath4.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/describe_xpath4.q.out (working copy)
@@ -1,2 +1,3 @@
+query: describe src_thrift.mStringString.abc
FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Error in getting fields from serde.Unknown type for abc
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Index: ql/src/test/results/clientnegative/orderbysortby.q.out
===================================================================
--- ql/src/test/results/clientnegative/orderbysortby.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/orderbysortby.q.out (working copy)
@@ -1 +1,2 @@
+query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
FAILED: Error in semantic analysis: line 8:8 Cannot have both Order By and Sort By Clauses one
Index: ql/src/test/results/clientnegative/union2.q.out
===================================================================
--- ql/src/test/results/clientnegative/union2.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/union2.q.out (working copy)
@@ -1 +1,5 @@
+query: drop table t1
+query: drop table t2
+query: create table if not exists t1(r string, c string, v string)
+query: create table if not exists t2(s string, c string, v string)
FAILED: Error in semantic analysis: Schema of both sides of union should match: Column v is of type string on first table and type double on second table
Index: ql/src/test/results/clientnegative/clusterbysortby.q.out
===================================================================
--- ql/src/test/results/clientnegative/clusterbysortby.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/clusterbysortby.q.out (working copy)
@@ -1 +1,2 @@
+query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
FAILED: Error in semantic analysis: line 8:8 Cannot have both Cluster By and Sort By Clauses one
Index: ql/src/test/results/clientnegative/create_insert_outputformat.q.out
===================================================================
--- ql/src/test/results/clientnegative/create_insert_outputformat.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/create_insert_outputformat.q.out (working copy)
@@ -1 +1,2 @@
+query: DROP TABLE table_test_output_fomat
FAILED: Error in semantic analysis: Output Format must implement HiveOutputFormat, otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat
Index: ql/src/test/results/clientnegative/fileformat_void_input.q.out
===================================================================
--- ql/src/test/results/clientnegative/fileformat_void_input.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/fileformat_void_input.q.out (working copy)
@@ -1 +1,8 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS
+ INPUTFORMAT 'java.lang.Void'
+ OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 10
+Input: default/src
+Output: default/dest1
FAILED: Error in semantic analysis: line 3:20 Input Format must implement InputFormat dest1
Index: ql/src/test/results/clientnegative/invalid_create_tbl1.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (revision 770442)
+++ ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (working copy)
@@ -1,2 +1,8 @@
+query: DROP TABLE inv_valid_tbl1
+query: CREATE TABLE inv_valid_tbl1 COMMENT 'This is a thrift based table'
+ PARTITIONED BY(aint DATETIME, country STRING)
+ CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS
+ ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.ThriftDeserializer' WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex', 'serialization.format' = 'com.facebook.thrift.protocol.TBinaryProtocol')
+ STORED AS SEQUENCEFILE
FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Partition column name aint conflicts with table columns.
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Index: ql/src/test/results/clientpositive/inputddl8.q.out
===================================================================
--- ql/src/test/results/clientpositive/inputddl8.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/inputddl8.q.out (working copy)
@@ -1,3 +1,10 @@
+query: DROP TABLE INPUTDDL8
+query: CREATE TABLE INPUTDDL8 COMMENT 'This is a thrift based table'
+ PARTITIONED BY(ds DATETIME, country STRING)
+ CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS
+ ROW FORMAT SERDE'org.apache.hadoop.hive.serde2.ThriftDeserializer' WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex', 'serialization.format' = 'com.facebook.thrift.protocol.TBinaryProtocol')
+ STORED AS SEQUENCEFILE
+query: DESCRIBE EXTENDED INPUTDDL8
aint int from deserializer
astring string from deserializer
lint array from deserializer
@@ -7,4 +14,5 @@
ds datetime
country string
-Detailed Table Information Table(tableName:inputddl8,dbName:default,owner:rmurthy,createTime:1238030322,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/inputddl8,inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat,outputFormat:org.apache.hadoop.mapred.SequenceFileOutputFormat,compressed:false,numBuckets:32,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.ThriftDeserializer,parameters:{serialization.class=org.apache.hadoop.hive.serde2.thrift.test.Complex,serialization.format=com.facebook.thrift.protocol.TBinaryProtocol}),bucketCols:[aint],sortCols:[Order(col:lint,order:1)],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null), FieldSchema(name:country,type:string,comment:null)],parameters:{comment=This is a thrift based table})
+Detailed Table Information Table(tableName:inputddl8,dbName:default,owner:athusoo,createTime:1241278371,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/inputddl8,inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat,compressed:false,numBuckets:32,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.ThriftDeserializer,parameters:{serialization.class=org.apache.hadoop.hive.serde2.thrift.test.Complex,serialization.format=com.facebook.thrift.protocol.TBinaryProtocol}),bucketCols:[aint],sortCols:[Order(col:lint,order:1)],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null), FieldSchema(name:country,type:string,comment:null)],parameters:{comment=This is a thrift based table})
+query: DROP TABLE INPUTDDL8
Index: ql/src/test/results/clientpositive/cast1.q.out
===================================================================
--- ql/src/test/results/clientpositive/cast1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/cast1.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE dest1(c1 INT, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 INT, c6 STRING, c7 INT) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT) + CAST(CAST(0 AS SMALLINT) AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (+ 3 2)) (TOK_SELEXPR (+ 3.0 2)) (TOK_SELEXPR (+ 3 2.0)) (TOK_SELEXPR (+ 3.0 2.0)) (TOK_SELEXPR (+ (+ 3 (TOK_FUNCTION TOK_INT 2.0)) (TOK_FUNCTION TOK_INT (TOK_FUNCTION TOK_SMALLINT 0)))) (TOK_SELEXPR (TOK_FUNCTION TOK_BOOLEAN 1)) (TOK_SELEXPR (TOK_FUNCTION TOK_INT TRUE))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL src) key) 86))))
@@ -54,4 +57,10 @@
name: dest1
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT) + CAST(CAST(0 AS SMALLINT) AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86
+Input: default/src
+Output: default/dest1
+query: select dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/351550201/345617503.10000
5 5.0 5.0 5.0 5 true 1
Index: ql/src/test/results/clientpositive/input16_cc.q.out
===================================================================
--- ql/src/test/results/clientpositive/input16_cc.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input16_cc.q.out (working copy)
@@ -1,3 +1,9 @@
+query: DROP TABLE INPUT16_CC
+query: CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val') STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC
+query: SELECT INPUT16_CC.VALUE, INPUT16_CC.KEY FROM INPUT16_CC
+Input: default/input16_cc
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/505022158/998217510.10000
val_238 238
val_86 86
val_311 311
@@ -498,3 +504,4 @@
val_400 400
val_200 200
val_97 97
+query: DROP TABLE INPUT16_CC
Index: ql/src/test/results/clientpositive/order.q.out
===================================================================
--- ql/src/test/results/clientpositive/order.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/order.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT x.* FROM SRC x ORDER BY key limit 10
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_ORDERBY (TOK_TABLE_OR_COL key)) (TOK_LIMIT 10)))
@@ -42,6 +44,9 @@
limit: 10
+query: SELECT x.* FROM SRC x ORDER BY key limit 10
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1564222471/1481411475.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/quote1.q.out
===================================================================
--- ql/src/test/results/clientpositive/quote1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/quote1.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(`location` INT, `type` STRING) PARTITIONED BY(`table` STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 PARTITION(`table`='2008-04-08') SELECT src.key as `partition`, src.value as `from` WHERE src.key >= 200 and src.key < 300
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1 (TOK_PARTSPEC (TOK_PARTVAL `table` '2008-04-08')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) `partition`) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) `from`)) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL src) key) 200) (< (. (TOK_TABLE_OR_COL src) key) 300)))))
@@ -48,6 +52,8 @@
name: dest1
+query: EXPLAIN
+SELECT `int`.`location`, `int`.`type`, `int`.`table` FROM dest1 `int` WHERE `int`.`table` = '2008-04-08'
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF dest1 `int`)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL `int`) `location`)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL `int`) `type`)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL `int`) `table`))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL `int`) `table`) '2008-04-08'))))
@@ -84,6 +90,13 @@
limit: -1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 PARTITION(`table`='2008-04-08') SELECT src.key as `partition`, src.value as `from` WHERE src.key >= 200 and src.key < 300
+Input: default/src
+Output: default/dest1/table=2008-04-08
+query: SELECT `int`.`location`, `int`.`type`, `int`.`table` FROM dest1 `int` WHERE `int`.`table` = '2008-04-08'
+Input: default/dest1/table=2008-04-08
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1243981525/976343478.10000
238 val_238 2008-04-08
255 val_255 2008-04-08
278 val_278 2008-04-08
Index: ql/src/test/results/clientpositive/notable_alias2.q.out
===================================================================
--- ql/src/test/results/clientpositive/notable_alias2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/notable_alias2.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(dummy STRING, key INT, value DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT '1234', src.key, count(1) WHERE key < 100 group by src.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR '1234') (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 100)) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) key))))
@@ -81,6 +85,13 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT '1234', src.key, count(1) WHERE key < 100 group by src.key
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/73789711/581555617.10000
1234 0 3.0
1234 10 1.0
1234 11 1.0
Index: ql/src/test/results/clientpositive/join2.q.out
===================================================================
--- ql/src/test/results/clientpositive/join2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join2.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest_j2(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key)
+INSERT OVERWRITE TABLE dest_j2 SELECT src1.key, src3.value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_TABREF src src1) (TOK_TABREF src src2) (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key))) (TOK_TABREF src src3) (= (+ (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)) (. (TOK_TABLE_OR_COL src3) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest_j2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src3) value)))))
@@ -131,6 +135,13 @@
name: dest_j2
+query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key)
+INSERT OVERWRITE TABLE dest_j2 SELECT src1.key, src3.value
+Input: default/src
+Output: default/dest_j2
+query: SELECT dest_j2.* FROM dest_j2
+Input: default/dest_j2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1281659057/880190123.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/input3.q.out
===================================================================
--- ql/src/test/results/clientpositive/input3.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input3.q.out (working copy)
@@ -1,8 +1,16 @@
+query: DROP TABLE TEST3a
+query: DROP TABLE TEST3b
+query: DROP TABLE TEST3c
+query: CREATE TABLE TEST3a(A INT, B DOUBLE) STORED AS TEXTFILE
+query: DESCRIBE TEST3a
a int
b double
+query: CREATE TABLE TEST3b(A ARRAY, B DOUBLE, C MAP) STORED AS TEXTFILE
+query: DESCRIBE TEST3b
a array
b double
c map
+query: SHOW TABLES
src
src1
src_json
@@ -12,6 +20,8 @@
srcpart
test3a
test3b
+query: EXPLAIN
+ALTER TABLE TEST3b ADD COLUMNS (X DOUBLE)
ABSTRACT SYNTAX TREE:
(TOK_ALTERTABLE_ADDCOLS TEST3b (TOK_TABCOLLIST (TOK_TABCOL X TOK_DOUBLE)))
@@ -27,10 +37,14 @@
old name: TEST3b
+query: ALTER TABLE TEST3b ADD COLUMNS (X DOUBLE)
+query: DESCRIBE TEST3b
a array
b double
c map
x double
+query: EXPLAIN
+ALTER TABLE TEST3b RENAME TO TEST3c
ABSTRACT SYNTAX TREE:
(TOK_ALTERTABLE_RENAME TEST3b TEST3c)
@@ -46,10 +60,13 @@
old name: TEST3b
+query: ALTER TABLE TEST3b RENAME TO TEST3c
+query: DESCRIBE TEST3c
a array
b double
c map
x double
+query: SHOW TABLES
src
src1
src_json
@@ -59,6 +76,8 @@
srcpart
test3a
test3c
+query: EXPLAIN
+ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 DOUBLE)
ABSTRACT SYNTAX TREE:
(TOK_ALTERTABLE_REPLACECOLS TEST3c (TOK_TABCOLLIST (TOK_TABCOL R1 TOK_INT) (TOK_TABCOL R2 TOK_DOUBLE)))
@@ -74,7 +93,12 @@
old name: TEST3c
+query: ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 DOUBLE)
+query: DESCRIBE EXTENDED TEST3c
r1 int
r2 double
-Detailed Table Information Table(tableName:test3c,dbName:default,owner:rmurthy,createTime:1238030229,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:r1,type:int,comment:null), FieldSchema(name:r2,type:double,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/test3b,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{last_modified_by=rmurthy,last_modified_time=1238030229})
+Detailed Table Information Table(tableName:test3c,dbName:default,owner:athusoo,createTime:1241278187,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:r1,type:int,comment:null), FieldSchema(name:r2,type:double,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/test3b,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{last_modified_by=athusoo,last_modified_time=1241278187})
+query: DROP TABLE TEST3a
+query: DROP TABLE TEST3b
+query: DROP TABLE TEST3c
Index: ql/src/test/results/clientpositive/join6.q.out
===================================================================
--- ql/src/test/results/clientpositive/join6.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join6.q.out (working copy)
@@ -1,3 +1,18 @@
+query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ FULL OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_FULLOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) c2)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) key) 10) (< (. (TOK_TABLE_OR_COL src1) key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) c4)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src2) key) 15) (< (. (TOK_TABLE_OR_COL src2) key) 25))))) b) (= (. (TOK_TABLE_OR_COL a) c1) (. (TOK_TABLE_OR_COL b) c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c1) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c2) c2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c3) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c3)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c4)))))
@@ -116,6 +131,24 @@
name: dest1
+query: FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ FULL OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/674683169/66863860.10000
11 val_11 NULL NULL
12 val_12 NULL NULL
12 val_12 NULL NULL
Index: ql/src/test/results/clientpositive/ppd_join3.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_join3.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_join3.q.out (working copy)
@@ -1,3 +1,14 @@
+query: EXPLAIN
+SELECT src1.c1, src2.c4
+FROM
+(SELECT src.key as c1, src.value as c2 from src where src.key <> '11' ) src1
+JOIN
+(SELECT src.key as c3, src.value as c4 from src where src.key <> '12' ) src2
+ON src1.c1 = src2.c3 AND src1.c1 < '400'
+JOIN
+(SELECT src.key as c5, src.value as c6 from src where src.key <> '13' ) src3
+ON src1.c1 = src3.c5
+WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1')
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c2)) (TOK_WHERE (<> (. (TOK_TABLE_OR_COL src) key) '11')))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)) (TOK_WHERE (<> (. (TOK_TABLE_OR_COL src) key) '12')))) src2) (AND (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)) (< (. (TOK_TABLE_OR_COL src1) c1) '400'))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c5) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c6)) (TOK_WHERE (<> (. (TOK_TABLE_OR_COL src) key) '13')))) src3) (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src3) c5)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) c4))) (TOK_WHERE (and (and (and (and (> (. (TOK_TABLE_OR_COL src1) c1) '0') (or (<> (. (TOK_TABLE_OR_COL src1) c2) 'val_500') (> (. (TOK_TABLE_OR_COL src1) c1) '1'))) (or (> (. (TOK_TABLE_OR_COL src2) c3) '10') (<> (. (TOK_TABLE_OR_COL src1) c1) '10'))) (<> (. (TOK_TABLE_OR_COL src2) c3) '4')) (<> (. (TOK_TABLE_OR_COL src3) c5) '1')))))
@@ -131,6 +142,18 @@
limit: -1
+query: SELECT src1.c1, src2.c4
+FROM
+(SELECT src.key as c1, src.value as c2 from src where src.key <> '11' ) src1
+JOIN
+(SELECT src.key as c3, src.value as c4 from src where src.key <> '12' ) src2
+ON src1.c1 = src2.c3 AND src1.c1 < '400'
+JOIN
+(SELECT src.key as c5, src.value as c6 from src where src.key <> '13' ) src3
+ON src1.c1 = src3.c5
+WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1')
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/860297500/296667267.10000
100 val_100
100 val_100
100 val_100
Index: ql/src/test/results/clientpositive/input7.q.out
===================================================================
--- ql/src/test/results/clientpositive/input7.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input7.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(c1 DOUBLE, c2 INT) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src1)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR TOK_NULL) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key)))))
@@ -46,6 +50,13 @@
name: dest1
+query: FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key
+Input: default/src1
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/412916640/832757248.10000
NULL 238
NULL NULL
NULL 311
Index: ql/src/test/results/clientpositive/union11.q.out
===================================================================
--- ql/src/test/results/clientpositive/union11.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union11.q.out (working copy)
@@ -1,3 +1,9 @@
+query: explain
+ select unionsrc.key, count(1) FROM (select 'tst1' as key, count(1) as value from src s1
+ UNION ALL
+ select 'tst2' as key, count(1) as value from src s2
+ UNION ALL
+ select 'tst3' as key, count(1) as value from src s3) unionsrc group by unionsrc.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst1' key) (TOK_SELEXPR (TOK_FUNCTION count 1) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst2' key) (TOK_SELEXPR (TOK_FUNCTION count 1) value))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s3)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst3' key) (TOK_SELEXPR (TOK_FUNCTION count 1) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL unionsrc) key))))
@@ -48,7 +54,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/1393391553/26823910.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/7097981/405016190.10002
Union
Group By Operator
aggregations:
@@ -69,7 +75,7 @@
value expressions:
expr: 1
type: bigint
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/1393391553/26823910.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/7097981/405016190.10003
Union
Group By Operator
aggregations:
@@ -90,7 +96,7 @@
value expressions:
expr: 1
type: bigint
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/1393391553/26823910.10004
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/7097981/405016190.10004
Union
Group By Operator
aggregations:
@@ -205,6 +211,13 @@
limit: -1
+query: select unionsrc.key, count(1) FROM (select 'tst1' as key, count(1) as value from src s1
+ UNION ALL
+ select 'tst2' as key, count(1) as value from src s2
+ UNION ALL
+ select 'tst3' as key, count(1) as value from src s3) unionsrc group by unionsrc.key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1498343090/387630909.10000
tst1 1
tst2 1
tst3 1
Index: ql/src/test/results/clientpositive/ppd_constant_expr.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_constant_expr.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_constant_expr.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE ppd_constant_expr(c1 STRING, c2 INT, c3 DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src1
+INSERT OVERWRITE TABLE ppd_constant_expr SELECT 4 + NULL, src1.key - NULL, NULL + NULL
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src1)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB ppd_constant_expr)) (TOK_SELECT (TOK_SELEXPR (+ 4 TOK_NULL)) (TOK_SELEXPR (- (. (TOK_TABLE_OR_COL src1) key) TOK_NULL)) (TOK_SELEXPR (+ TOK_NULL TOK_NULL)))))
@@ -50,6 +54,13 @@
name: ppd_constant_expr
+query: FROM src1
+INSERT OVERWRITE TABLE ppd_constant_expr SELECT 4 + NULL, src1.key - NULL, NULL + NULL
+Input: default/src1
+Output: default/ppd_constant_expr
+query: SELECT ppd_constant_expr.* FROM ppd_constant_expr
+Input: default/ppd_constant_expr
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/254664389/264213010.10000
NULL NULL NULL
NULL NULL NULL
NULL NULL NULL
@@ -75,3 +86,4 @@
NULL NULL NULL
NULL NULL NULL
NULL NULL NULL
+query: DROP TABLE ppd_constant_expr
Index: ql/src/test/results/clientpositive/ppd_union.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_union.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_union.q.out (working copy)
@@ -1,3 +1,11 @@
+query: EXPLAIN
+FROM (
+ FROM src select src.key, src.value WHERE src.key < '100'
+ UNION ALL
+ FROM src SELECT src.* WHERE src.key > '150'
+) unioned_query
+SELECT unioned_query.*
+ WHERE key > '4' and value > 'val_4'
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) '100')))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) '150'))))) unioned_query)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF unioned_query))) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) '4') (> (TOK_TABLE_OR_COL value) 'val_4')))))
@@ -79,6 +87,15 @@
limit: -1
+query: FROM (
+ FROM src select src.key, src.value WHERE src.key < '100'
+ UNION ALL
+ FROM src SELECT src.* WHERE src.key > '150'
+) unioned_query
+SELECT unioned_query.*
+ WHERE key > '4' and value > 'val_4'
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/202047284/848303534.10000
86 val_86
409 val_409
98 val_98
Index: ql/src/test/results/clientpositive/rand_partitionpruner3.q.out
===================================================================
--- ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (working copy)
@@ -1,3 +1,4 @@
+query: explain extended select a.* from srcpart a where rand(1) < 0.1 and a.ds = '2008-04-08' and not(key > 50 or key < 10) and a.hr like '%2'
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF srcpart a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF a))) (TOK_WHERE (and (and (and (< (TOK_FUNCTION rand 1) 0.1) (= (. (TOK_TABLE_OR_COL a) ds) '2008-04-08')) (not (or (> (TOK_TABLE_OR_COL key) 50) (< (TOK_TABLE_OR_COL key) 10)))) (like (. (TOK_TABLE_OR_COL a) hr) '%2')))))
@@ -27,7 +28,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/354827607/777023557.10001.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/770982235/339058649.10001.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -36,9 +37,9 @@
serialization.format 1
Needs Tagging: false
Path -> Alias:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Path -> Partition:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
partition values:
ds 2008-04-08
@@ -57,7 +58,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
@@ -66,11 +67,15 @@
limit: -1
+query: select a.* from srcpart a where rand(1) < 0.1 and a.ds = '2008-04-08' and not(key > 50 or key < 10) and a.hr like '%2'
+Input: default/srcpart/ds=2008-04-08/hr=12
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/461813857/72994216.10000
42 val_42 2008-04-08 12
44 val_44 2008-04-08 12
26 val_26 2008-04-08 12
18 val_18 2008-04-08 12
37 val_37 2008-04-08 12
+query: explain extended select a.* from srcpart a where a.ds = '2008-04-08' and not(key > 50 or key < 10) and a.hr like '%2'
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF srcpart a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF a))) (TOK_WHERE (and (and (= (. (TOK_TABLE_OR_COL a) ds) '2008-04-08') (not (or (> (TOK_TABLE_OR_COL key) 50) (< (TOK_TABLE_OR_COL key) 10)))) (like (. (TOK_TABLE_OR_COL a) hr) '%2')))))
@@ -100,7 +105,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/939483661/156334823.10001.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/203819110/60154549.10001.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -109,9 +114,9 @@
serialization.format 1
Needs Tagging: false
Path -> Alias:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Path -> Partition:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
partition values:
ds 2008-04-08
@@ -130,7 +135,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
@@ -139,6 +144,9 @@
limit: -1
+query: select a.* from srcpart a where a.ds = '2008-04-08' and not(key > 50 or key < 10) and a.hr like '%2'
+Input: default/srcpart/ds=2008-04-08/hr=12
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/306669229/518134414.10000
27 val_27 2008-04-08 12
37 val_37 2008-04-08 12
15 val_15 2008-04-08 12
Index: ql/src/test/results/clientpositive/union15.q.out
===================================================================
--- ql/src/test/results/clientpositive/union15.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union15.q.out (working copy)
@@ -1,3 +1,9 @@
+query: explain
+ select unionsrc.key, count(1) FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
+ UNION ALL
+ select s2.key as key, s2.value as value from src1 s2
+ UNION ALL
+ select s3.key as key, s3.value as value from src1 s3) unionsrc group by unionsrc.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst1' key) (TOK_SELEXPR (TOK_FUNCTION TOK_STRING (TOK_FUNCTION count 1)) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src1 s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) value) value))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src1 s3)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s3) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s3) value) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL unionsrc) key))))
@@ -48,7 +54,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/191229493/1003011693.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1766444/227040434.10002
Union
Group By Operator
aggregations:
@@ -69,7 +75,7 @@
value expressions:
expr: 1
type: bigint
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/191229493/1003011693.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1766444/227040434.10003
Union
Group By Operator
aggregations:
@@ -90,7 +96,7 @@
value expressions:
expr: 1
type: bigint
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/191229493/1003011693.10004
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1766444/227040434.10004
Union
Group By Operator
aggregations:
@@ -173,6 +179,14 @@
limit: -1
+query: select unionsrc.key, count(1) FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
+ UNION ALL
+ select s2.key as key, s2.value as value from src1 s2
+ UNION ALL
+ select s3.key as key, s3.value as value from src1 s3) unionsrc group by unionsrc.key
+Input: default/src
+Input: default/src1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/587819082/617717160.10000
20
128 2
146 2
Index: ql/src/test/results/clientpositive/groupby2_noskew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby2_noskew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby2_noskew.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest_g2(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest_g2)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))))) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))))
@@ -66,6 +70,13 @@
name: dest_g2
+query: FROM src
+INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
+Input: default/src
+Output: default/dest_g2
+query: SELECT dest_g2.* FROM dest_g2
+Input: default/dest_g2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/236352888/719349372.10000
0 1 00.0
1 71 116414.0
2 69 225571.0
Index: ql/src/test/results/clientpositive/udf1.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf1.q.out (working copy)
@@ -1,3 +1,13 @@
+query: CREATE TABLE dest1(c1 STRING, c2 STRING, c3 STRING, c4 STRING,
+ c5 STRING, c6 STRING, c7 STRING, c8 STRING,
+ c9 STRING, c10 STRING, c11 STRING, c12 STRING, c13 STRING,
+ c14 STRING, c15 STRING, c16 STRING, c17 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT 'a' LIKE '%a%', 'b' LIKE '%a%', 'ab' LIKE '%a%', 'ab' LIKE '%a_',
+ '%_' LIKE '\%\_', 'ab' LIKE '\%\_', 'ab' LIKE '_a%', 'ab' LIKE 'a',
+ '' RLIKE '.*', 'a' RLIKE '[ab]', '' RLIKE '[ab]', 'hadoop' RLIKE '[a-z]*', 'hadoop' RLIKE 'o*',
+ REGEXP_REPLACE('abc', 'b', 'c'), REGEXP_REPLACE('abc', 'z', 'a'), REGEXP_REPLACE('abbbb', 'bb', 'b'), REGEXP_REPLACE('hadoop', '(.)[a-z]*', '$1ive')
+ WHERE src.key = 86
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (LIKE 'a' '%a%')) (TOK_SELEXPR (LIKE 'b' '%a%')) (TOK_SELEXPR (LIKE 'ab' '%a%')) (TOK_SELEXPR (LIKE 'ab' '%a_')) (TOK_SELEXPR (LIKE '%_' '\%\_')) (TOK_SELEXPR (LIKE 'ab' '\%\_')) (TOK_SELEXPR (LIKE 'ab' '_a%')) (TOK_SELEXPR (LIKE 'ab' 'a')) (TOK_SELEXPR (RLIKE '' '.*')) (TOK_SELEXPR (RLIKE 'a' '[ab]')) (TOK_SELEXPR (RLIKE '' '[ab]')) (TOK_SELEXPR (RLIKE 'hadoop' '[a-z]*')) (TOK_SELEXPR (RLIKE 'hadoop' 'o*')) (TOK_SELEXPR (TOK_FUNCTION REGEXP_REPLACE 'abc' 'b' 'c')) (TOK_SELEXPR (TOK_FUNCTION REGEXP_REPLACE 'abc' 'z' 'a')) (TOK_SELEXPR (TOK_FUNCTION REGEXP_REPLACE 'abbbb' 'bb' 'b')) (TOK_SELEXPR (TOK_FUNCTION REGEXP_REPLACE 'hadoop' '(.)[a-z]*' '$1ive'))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL src) key) 86))))
@@ -74,4 +84,14 @@
name: dest1
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT 'a' LIKE '%a%', 'b' LIKE '%a%', 'ab' LIKE '%a%', 'ab' LIKE '%a_',
+ '%_' LIKE '\%\_', 'ab' LIKE '\%\_', 'ab' LIKE '_a%', 'ab' LIKE 'a',
+ '' RLIKE '.*', 'a' RLIKE '[ab]', '' RLIKE '[ab]', 'hadoop' RLIKE '[a-z]*', 'hadoop' RLIKE 'o*',
+ REGEXP_REPLACE('abc', 'b', 'c'), REGEXP_REPLACE('abc', 'z', 'a'), REGEXP_REPLACE('abbbb', 'bb', 'b'), REGEXP_REPLACE('hadoop', '(.)[a-z]*', '$1ive')
+ WHERE src.key = 86
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1029990279/355552817.10000
true false true true true false false false true true false true false acc abc abb hive
Index: ql/src/test/results/clientpositive/join12.q.out
===================================================================
--- ql/src/test/results/clientpositive/join12.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join12.q.out (working copy)
@@ -1,3 +1,13 @@
+query: EXPLAIN
+SELECT src1.c1, src2.c4
+FROM
+(SELECT src.key as c1, src.value as c2 from src) src1
+JOIN
+(SELECT src.key as c3, src.value as c4 from src) src2
+ON src1.c1 = src2.c3 AND src1.c1 < 100
+JOIN
+(SELECT src.key as c5, src.value as c6 from src) src3
+ON src1.c1 = src3.c5 AND src3.c5 < 80
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c2)))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)))) src2) (AND (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)) (< (. (TOK_TABLE_OR_COL src1) c1) 100))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c5) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c6)))) src3) (AND (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src3) c5)) (< (. (TOK_TABLE_OR_COL src3) c5) 80)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) c4)))))
@@ -107,6 +117,17 @@
limit: -1
+query: SELECT src1.c1, src2.c4
+FROM
+(SELECT src.key as c1, src.value as c2 from src) src1
+JOIN
+(SELECT src.key as c3, src.value as c4 from src) src2
+ON src1.c1 = src2.c3 AND src1.c1 < 100
+JOIN
+(SELECT src.key as c5, src.value as c6 from src) src3
+ON src1.c1 = src3.c5 AND src3.c5 < 80
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/414875832/942578918.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/groupby5_map_skew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby5_map_skew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby5_map_skew.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE dest1(key INT) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT sum(src.key)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (. (TOK_TABLE_OR_COL src) key))))))
@@ -57,4 +60,10 @@
name: dest1
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT sum(src.key)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/207636079/315601969.10000
130091
Index: ql/src/test/results/clientpositive/union19.q.out
===================================================================
--- ql/src/test/results/clientpositive/union19.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union19.q.out (working copy)
@@ -1,3 +1,13 @@
+query: drop table DEST1
+query: drop table DEST2
+query: CREATE TABLE DEST1(key STRING, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE DEST2(key STRING, val1 STRING, val2 STRING) STORED AS TEXTFILE
+query: explain
+FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
+ UNION ALL
+ select s2.key as key, s2.value as value from src s2) unionsrc
+INSERT OVERWRITE TABLE DEST1 SELECT unionsrc.key, count(unionsrc.value) group by unionsrc.key
+INSERT OVERWRITE TABLE DEST2 SELECT unionsrc.key, unionsrc.value, unionsrc.value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst1' key) (TOK_SELEXPR (TOK_FUNCTION TOK_STRING (TOK_FUNCTION count 1)) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) value) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL unionsrc) value)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL unionsrc) key))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) value)))))
@@ -47,7 +57,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/athusoo/commits/hive_trunk_ws7/build/ql/tmp/1555732948/1195810282.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/254461326/1118368198.10004
Union
Group By Operator
aggregations:
@@ -84,7 +94,7 @@
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest2
- /data/users/athusoo/commits/hive_trunk_ws7/build/ql/tmp/1555732948/1195810282.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/254461326/1118368198.10005
Union
Group By Operator
aggregations:
@@ -179,6 +189,17 @@
name: binary_table
+query: FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
+ UNION ALL
+ select s2.key as key, s2.value as value from src s2) unionsrc
+INSERT OVERWRITE TABLE DEST1 SELECT unionsrc.key, count(unionsrc.value) group by unionsrc.key
+INSERT OVERWRITE TABLE DEST2 SELECT unionsrc.key, unionsrc.value, unionsrc.value
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+query: SELECT DEST1.* FROM DEST1 SORT BY DEST1.key, DEST1.value
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/467532129/948299768.10000
0 3
10 1
100 2
@@ -489,6 +510,9 @@
97 2
98 2
tst1 1
+query: SELECT DEST2.* FROM DEST2 SORT BY DEST2.key, DEST2.val1, DEST2.val2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/168132316/261490647.10000
0 val_0 val_0
0 val_0 val_0
0 val_0 val_0
@@ -990,3 +1014,5 @@
98 val_98 val_98
98 val_98 val_98
tst1 500 500
+query: drop table DEST1
+query: drop table DEST2
Index: ql/src/test/results/clientpositive/udf5.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf5.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf5.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT ' abc ' WHERE src.key = 86
+Input: default/src
+Output: default/dest1
+query: EXPLAIN
+SELECT from_unixtime(1226446340), to_date(from_unixtime(1226446340)), day('2008-11-01'), month('2008-11-01'), year('2008-11-01'), day('2008-11-01 15:32:20'), month('2008-11-01 15:32:20'), year('2008-11-01 15:32:20') FROM dest1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION from_unixtime 1226446340)) (TOK_SELEXPR (TOK_FUNCTION to_date (TOK_FUNCTION from_unixtime 1226446340))) (TOK_SELEXPR (TOK_FUNCTION day '2008-11-01')) (TOK_SELEXPR (TOK_FUNCTION month '2008-11-01')) (TOK_SELEXPR (TOK_FUNCTION year '2008-11-01')) (TOK_SELEXPR (TOK_FUNCTION day '2008-11-01 15:32:20')) (TOK_SELEXPR (TOK_FUNCTION month '2008-11-01 15:32:20')) (TOK_SELEXPR (TOK_FUNCTION year '2008-11-01 15:32:20')))))
@@ -41,4 +47,7 @@
limit: -1
+query: SELECT from_unixtime(1226446340), to_date(from_unixtime(1226446340)), day('2008-11-01'), month('2008-11-01'), year('2008-11-01'), day('2008-11-01 15:32:20'), month('2008-11-01 15:32:20'), year('2008-11-01 15:32:20') FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/396140468/103691975.10000
2008-11-11 15:32:20 2008-11-11 1 11 2008 1 11 2008
Index: ql/src/test/results/clientpositive/input13.q.out
===================================================================
--- ql/src/test/results/clientpositive/input13.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input13.q.out (working copy)
@@ -1,3 +1,12 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE dest3(key INT) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300
+INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL src) key) 100) (< (. (TOK_TABLE_OR_COL src) key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest3 (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL src) key) 200) (< (. (TOK_TABLE_OR_COL src) key) 300)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR '../build/ql/test/data/warehouse/dest4.out')) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (>= (. (TOK_TABLE_OR_COL src) key) 300))))
@@ -122,6 +131,19 @@
name: dest3
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300
+INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+Output: default/dest3/ds=2008-04-08/hr=12
+Output: ../build/ql/test/data/warehouse/dest4.out
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/101162710/400931609.10000
86 val_86
27 val_27
98 val_98
@@ -206,6 +228,9 @@
37 val_37
90 val_90
97 val_97
+query: SELECT dest2.* FROM dest2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/276246666/1730128617.10000
165 val_165
193 val_193
150 val_150
@@ -311,6 +336,9 @@
194 val_194
126 val_126
169 val_169
+query: SELECT dest3.* FROM dest3
+Input: default/dest3/ds=2008-04-08/hr=12
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/267290504/751561735.10000
238 2008-04-08 12
255 2008-04-08 12
278 2008-04-08 12
Index: ql/src/test/results/clientpositive/input_part0.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part0.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_part0.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08'
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) ds) '2008-04-08'))))
@@ -10,6 +12,10 @@
limit: -1
+query: SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08'
+Input: default/srcpart/ds=2008-04-08/hr=11
+Input: default/srcpart/ds=2008-04-08/hr=12
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/215109247/3430598.10000
238 val_238 2008-04-08 11
86 val_86 2008-04-08 11
311 val_311 2008-04-08 11
Index: ql/src/test/results/clientpositive/join16.q.out
===================================================================
--- ql/src/test/results/clientpositive/join16.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join16.q.out (working copy)
@@ -1,3 +1,4 @@
+query: EXPLAIN SELECT subq.key, tab.value FROM (select a.key, a.value from src a where a.key > 10 ) subq JOIN src tab ON (subq.key = tab.key and subq.key > 20 and subq.value = tab.value) where tab.value < 200
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL a) key) 10)))) subq) (TOK_TABREF src tab) (and (and (= (. (TOK_TABLE_OR_COL subq) key) (. (TOK_TABLE_OR_COL tab) key)) (> (. (TOK_TABLE_OR_COL subq) key) 20)) (= (. (TOK_TABLE_OR_COL subq) value) (. (TOK_TABLE_OR_COL tab) value))))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL subq) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tab) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tab) value) 200))))
Index: ql/src/test/results/clientpositive/input17.q.out
===================================================================
--- ql/src/test/results/clientpositive/input17.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input17.q.out (working copy)
@@ -1,3 +1,12 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM src_thrift
+ SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0])
+ USING '/bin/cat' AS (tkey, tvalue)
+ CLUSTER BY tkey
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (+ (. (TOK_TABLE_OR_COL src_thrift) aint) ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0)) ([ (. (TOK_TABLE_OR_COL src_thrift) lintstring) 0)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue)))))
@@ -77,6 +86,18 @@
name: dest1
+query: FROM (
+ FROM src_thrift
+ SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0])
+ USING '/bin/cat' AS (tkey, tvalue)
+ CLUSTER BY tkey
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
+Input: default/src_thrift
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/10910343/1742990479.10000
NULL {"myint":null,"mystring":null}
-1461153966 {"myint":49,"mystring":"343"}
-1952710705 {"myint":25,"mystring":"125"}
Index: ql/src/test/results/clientpositive/udf9.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf9.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf9.q.out (working copy)
@@ -1,3 +1,13 @@
+query: EXPLAIN
+SELECT DATEDIFF('2008-12-31', '2009-01-01'), DATEDIFF('2008-03-01', '2008-02-28'),
+ DATEDIFF('2007-03-01', '2007-01-28'), DATEDIFF('2008-03-01 23:59:59', '2008-03-02 00:00:00'),
+ DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365),
+ DATE_ADD('2008-02-28', 2), DATE_ADD('2009-02-28', 2),
+ DATE_ADD('2007-02-28', 365), DATE_ADD('2007-02-28 23:59:59', 730),
+ DATE_SUB('2009-01-01', 1), DATE_SUB('2009-01-01', 365),
+ DATE_SUB('2008-02-28', 2), DATE_SUB('2009-02-28', 2),
+ DATE_SUB('2007-02-28', 365), DATE_SUB('2007-02-28 01:12:34', 730)
+ FROM src WHERE src.key = 86
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION DATEDIFF '2008-12-31' '2009-01-01')) (TOK_SELEXPR (TOK_FUNCTION DATEDIFF '2008-03-01' '2008-02-28')) (TOK_SELEXPR (TOK_FUNCTION DATEDIFF '2007-03-01' '2007-01-28')) (TOK_SELEXPR (TOK_FUNCTION DATEDIFF '2008-03-01 23:59:59' '2008-03-02 00:00:00')) (TOK_SELEXPR (TOK_FUNCTION DATE_ADD '2008-12-31' 1)) (TOK_SELEXPR (TOK_FUNCTION DATE_ADD '2008-12-31' 365)) (TOK_SELEXPR (TOK_FUNCTION DATE_ADD '2008-02-28' 2)) (TOK_SELEXPR (TOK_FUNCTION DATE_ADD '2009-02-28' 2)) (TOK_SELEXPR (TOK_FUNCTION DATE_ADD '2007-02-28' 365)) (TOK_SELEXPR (TOK_FUNCTION DATE_ADD '2007-02-28 23:59:59' 730)) (TOK_SELEXPR (TOK_FUNCTION DATE_SUB '2009-01-01' 1)) (TOK_SELEXPR (TOK_FUNCTION DATE_SUB '2009-01-01' 365)) (TOK_SELEXPR (TOK_FUNCTION DATE_SUB '2008-02-28' 2)) (TOK_SELEXPR (TOK_FUNCTION DATE_SUB '2009-02-28' 2)) (TOK_SELEXPR (TOK_FUNCTION DATE_SUB '2007-02-28' 365)) (TOK_SELEXPR (TOK_FUNCTION DATE_SUB '2007-02-28 01:12:34' 730))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL src) key) 86))))
@@ -64,4 +74,15 @@
limit: -1
+query: SELECT DATEDIFF('2008-12-31', '2009-01-01'), DATEDIFF('2008-03-01', '2008-02-28'),
+ DATEDIFF('2007-03-01', '2007-01-28'), DATEDIFF('2008-03-01 23:59:59', '2008-03-02 00:00:00'),
+ DATE_ADD('2008-12-31', 1), DATE_ADD('2008-12-31', 365),
+ DATE_ADD('2008-02-28', 2), DATE_ADD('2009-02-28', 2),
+ DATE_ADD('2007-02-28', 365), DATE_ADD('2007-02-28 23:59:59', 730),
+ DATE_SUB('2009-01-01', 1), DATE_SUB('2009-01-01', 365),
+ DATE_SUB('2008-03-01', 2), DATE_SUB('2009-03-01', 2),
+ DATE_SUB('2007-02-28', 365), DATE_SUB('2007-02-28 01:12:34', 730)
+ FROM src WHERE src.key = 86
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/804319999/376709074.10000
-1 2 32 -1 2009-01-01 2009-12-31 2008-03-01 2009-03-02 2008-02-28 2009-02-27 2008-12-31 2008-01-02 2008-02-28 2009-02-27 2006-02-28 2005-02-28
Index: ql/src/test/results/clientpositive/groupby1.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby1.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE dest_g1(key INT, value DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest_g1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest_g1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) key))))
@@ -42,7 +45,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/169668679/777643789.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/121598851/279205958.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -95,6 +98,12 @@
name: dest_g1
+query: FROM src INSERT OVERWRITE TABLE dest_g1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
+Input: default/src
+Output: default/dest_g1
+query: SELECT dest_g1.* FROM dest_g1
+Input: default/dest_g1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/71965672/1050923207.10000
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/input_part4.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part4.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_part4.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08' and x.hr = 15
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL x) ds) '2008-04-08') (= (. (TOK_TABLE_OR_COL x) hr) 15)))))
@@ -10,3 +12,5 @@
limit: -1
+query: SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08' and x.hr = 15
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/11596454/223755609.10000
Index: ql/src/test/results/clientpositive/nullgroup2.q.out
===================================================================
--- ql/src/test/results/clientpositive/nullgroup2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/nullgroup2.q.out (working copy)
@@ -1,3 +1,5 @@
+query: explain
+select x.key, count(1) from src x where x.key > 9999 group by x.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL x) key) 9999)) (TOK_GROUPBY (. (TOK_TABLE_OR_COL x) key))))
@@ -57,7 +59,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/520309707/211312349.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/538828350/1358281988.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -96,6 +98,11 @@
limit: -1
+query: select x.key, count(1) from src x where x.key > 9999 group by x.key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/324222383/5029306.10000
+query: explain
+select x.key, count(1) from src x where x.key > 9999 group by x.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL x) key) 9999)) (TOK_GROUPBY (. (TOK_TABLE_OR_COL x) key))))
@@ -161,6 +168,11 @@
limit: -1
+query: select x.key, count(1) from src x where x.key > 9999 group by x.key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/354390972/29453903.10000
+query: explain
+select x.key, count(1) from src x where x.key > 9999 group by x.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL x) key) 9999)) (TOK_GROUPBY (. (TOK_TABLE_OR_COL x) key))))
@@ -213,7 +225,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/1162050685/275319179.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/221557343/272912479.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -252,6 +264,11 @@
limit: -1
+query: select x.key, count(1) from src x where x.key > 9999 group by x.key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/238392747/257741696.10000
+query: explain
+select x.key, count(1) from src x where x.key > 9999 group by x.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL x) key) 9999)) (TOK_GROUPBY (. (TOK_TABLE_OR_COL x) key))))
@@ -310,3 +327,6 @@
limit: -1
+query: select x.key, count(1) from src x where x.key > 9999 group by x.key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/865137866/5631075.10000
Index: ql/src/test/results/clientpositive/udf_case.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_case.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf_case.q.out (working copy)
@@ -1,3 +1,30 @@
+query: EXPLAIN
+SELECT CASE 1
+ WHEN 1 THEN 2
+ WHEN 3 THEN 4
+ ELSE 5
+ END,
+ CASE 2
+ WHEN 1 THEN 2
+ ELSE 5
+ END,
+ CASE 14
+ WHEN 12 THEN 13
+ WHEN 14 THEN 15
+ END,
+ CASE 16
+ WHEN 12 THEN 13
+ WHEN 14 THEN 15
+ END,
+ CASE 17
+ WHEN 18 THEN NULL
+ WHEN 17 THEN 20
+ END,
+ CASE 21
+ WHEN 22 THEN 23
+ WHEN 21 THEN 24
+ END
+FROM src LIMIT 1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION CASE 1 1 2 3 4 5)) (TOK_SELEXPR (TOK_FUNCTION CASE 2 1 2 5)) (TOK_SELEXPR (TOK_FUNCTION CASE 14 12 13 14 15)) (TOK_SELEXPR (TOK_FUNCTION CASE 16 12 13 14 15)) (TOK_SELEXPR (TOK_FUNCTION CASE 17 18 TOK_NULL 17 20)) (TOK_SELEXPR (TOK_FUNCTION CASE 21 22 23 21 24))) (TOK_LIMIT 1)))
@@ -38,4 +65,32 @@
limit: 1
+query: SELECT CASE 1
+ WHEN 1 THEN 2
+ WHEN 3 THEN 4
+ ELSE 5
+ END,
+ CASE 2
+ WHEN 1 THEN 2
+ ELSE 5
+ END,
+ CASE 14
+ WHEN 12 THEN 13
+ WHEN 14 THEN 15
+ END,
+ CASE 16
+ WHEN 12 THEN 13
+ WHEN 14 THEN 15
+ END,
+ CASE 17
+ WHEN 18 THEN NULL
+ WHEN 17 THEN 20
+ END,
+ CASE 21
+ WHEN 22 THEN 23
+ WHEN 21 THEN 24
+ END
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/116746963/155290781.10000
2 5 15 NULL 20 24
Index: ql/src/test/results/clientpositive/groupby5.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby5.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby5.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+INSERT OVERWRITE TABLE dest1
+SELECT src.key, sum(substr(src.value,5))
+FROM src
+GROUP BY src.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) key))))
@@ -42,7 +48,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/115761756/655597342.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/496157129/694276302.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -95,6 +101,15 @@
name: dest1
+query: INSERT OVERWRITE TABLE dest1
+SELECT src.key, sum(substr(src.value,5))
+FROM src
+GROUP BY src.key
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/898371644/800050208.10000
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/union4.q.out
===================================================================
--- ql/src/test/results/clientpositive/union4.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union4.q.out (working copy)
@@ -1,3 +1,10 @@
+query: drop table tmptable
+query: create table tmptable(key string, value int)
+query: explain
+insert overwrite table tmptable
+ select unionsrc.key, unionsrc.value FROM (select 'tst1' as key, count(1) as value from src s1
+ UNION ALL
+ select 'tst2' as key, count(1) as value from src s2) unionsrc
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst1' key) (TOK_SELEXPR (TOK_FUNCTION count 1) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst2' key) (TOK_SELEXPR (TOK_FUNCTION count 1) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB tmptable)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) value)))))
@@ -47,7 +54,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/821364331/229014134.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/371236334/463237113.10002
Union
Select Operator
expressions:
@@ -69,7 +76,7 @@
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: tmptable
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/821364331/229014134.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/371236334/463237113.10003
Union
Select Operator
expressions:
@@ -137,5 +144,15 @@
name: binary_table
+query: insert overwrite table tmptable
+select unionsrc.key, unionsrc.value FROM (select 'tst1' as key, count(1) as value from src s1
+ UNION ALL
+ select 'tst2' as key, count(1) as value from src s2) unionsrc
+Input: default/src
+Output: default/tmptable
+query: select * from tmptable x sort by x.key
+Input: default/tmptable
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/100441451/111422539.10000
tst1 500
tst2 500
+query: drop table tmptable
Index: ql/src/test/results/clientpositive/input_part8.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part8.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_part8.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT x.* FROM SRCPART x WHERE ds = '2008-04-08' LIMIT 10
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '2008-04-08')) (TOK_LIMIT 10)))
@@ -10,6 +12,10 @@
limit: 10
+query: SELECT x.* FROM SRCPART x WHERE ds = '2008-04-08' LIMIT 10
+Input: default/srcpart/ds=2008-04-08/hr=11
+Input: default/srcpart/ds=2008-04-08/hr=12
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/237689677/180894904.10000
238 val_238 2008-04-08 11
86 val_86 2008-04-08 11
311 val_311 2008-04-08 11
Index: ql/src/test/results/clientpositive/ppd_clusterby.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_clusterby.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_clusterby.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 10)) (TOK_CLUSTERBY (. (TOK_TABLE_OR_COL x) key))))
@@ -52,7 +54,12 @@
limit: -1
+query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/500213612/116730221.10000
10 val_10
+query: EXPLAIN
+SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF SRC x) (TOK_TABREF SRC y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL y) key))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL v1))))
@@ -131,7 +138,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/184785750/636311536.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/23042782/2223093.10002
Reduce Output Operator
key expressions:
expr: 1
@@ -162,4 +169,7 @@
limit: -1
+query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/50956604/873637666.10000
20 val_20 20
Index: ql/src/test/results/clientpositive/union8.q.out
===================================================================
--- ql/src/test/results/clientpositive/union8.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union8.q.out (working copy)
@@ -1,3 +1,7 @@
+query: explain
+ select unionsrc.key, unionsrc.value FROM (select s1.key as key, s1.value as value from src s1 UNION ALL
+ select s2.key as key, s2.value as value from src s2 UNION ALL
+ select s3.key as key, s3.value as value from src s3) unionsrc
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s1) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s1) value) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) value) value))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s3)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s3) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s3) value) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) value)))))
@@ -75,6 +79,11 @@
limit: -1
+query: select unionsrc.key, unionsrc.value FROM (select s1.key as key, s1.value as value from src s1 UNION ALL
+ select s2.key as key, s2.value as value from src s2 UNION ALL
+ select s3.key as key, s3.value as value from src s3) unionsrc
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/158356330/11376793.10000
238 val_238
238 val_238
238 val_238
Index: ql/src/test/results/clientpositive/groupby9.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby9.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby9.q.out (working copy)
@@ -1,3 +1,11 @@
+query: drop table DEST1
+query: drop table DEST2
+query: CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE DEST2(key INT, val1 STRING, val2 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, SRC.value, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key, SRC.value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTIONDI COUNT (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) value)) (TOK_SELEXPR (TOK_FUNCTIONDI COUNT (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key) (. (TOK_TABLE_OR_COL SRC) value))))
@@ -101,7 +109,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/1510455594/320201292.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/31497839/593540242.10004
Reduce Output Operator
key expressions:
expr: 0
@@ -156,6 +164,15 @@
name: dest2
+query: FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, SRC.value, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key, SRC.value
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+query: SELECT DEST1.* FROM DEST1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/152745787/1102138791.10000
0 1
10 1
100 1
@@ -465,6 +482,9 @@
96 1
97 1
98 1
+query: SELECT DEST2.* FROM DEST2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1433096776/86175165.10000
0 val_0 1
10 val_10 1
100 val_100 1
@@ -774,3 +794,5 @@
96 val_96 1
97 val_97 1
98 val_98 1
+query: drop table DEST1
+query: drop table DEST2
Index: ql/src/test/results/clientpositive/insert1.q.out
===================================================================
--- ql/src/test/results/clientpositive/insert1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/insert1.q.out (working copy)
@@ -0,0 +1,9 @@
+query: drop table insert1
+query: drop table insert2
+query: create table insert1(key int, value string) stored as textfile
+query: create table insert2(key int, value string) stored as textfile
+query: insert overwrite table insert1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+Input: default/insert2
+Output: default/insert1
+query: drop table insert1
+query: drop table insert2
Index: ql/src/test/results/clientpositive/mapreduce3.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapreduce3.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/mapreduce3.q.out (working copy)
@@ -1,3 +1,10 @@
+query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+SORT BY tvalue, tkey
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tvalue)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tkey)))))
@@ -74,6 +81,16 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+SORT BY tvalue, tkey
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/122478505/180989797.10000
0 0 0 val_0
0 0 0 val_0
0 0 0 val_0
Index: ql/src/test/results/clientpositive/showparts.q.out
===================================================================
--- ql/src/test/results/clientpositive/showparts.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/showparts.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SHOW PARTITIONS srcpart
ABSTRACT SYNTAX TREE:
(TOK_SHOWPARTITIONS srcpart)
@@ -16,6 +18,7 @@
limit: -1
+query: SHOW PARTITIONS srcpart
ds=2008-04-08/hr=11
ds=2008-04-08/hr=12
ds=2008-04-09/hr=11
Index: ql/src/test/results/clientpositive/union20.q.out
===================================================================
--- ql/src/test/results/clientpositive/union20.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union20.q.out (working copy)
@@ -1,3 +1,13 @@
+query: explain
+SELECT unionsrc1.key, unionsrc1.value, unionsrc2.key, unionsrc2.value
+FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
+ UNION ALL
+ select s2.key as key, s2.value as value from src s2 where s2.key < 10) unionsrc1
+JOIN
+ (select 'tst1' as key, cast(count(1) as string) as value from src s3
+ UNION ALL
+ select s4.key as key, s4.value as value from src s4 where s4.key < 10) unionsrc2
+ON (unionsrc1.key = unionsrc2.key)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst1' key) (TOK_SELEXPR (TOK_FUNCTION TOK_STRING (TOK_FUNCTION count 1)) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) value) value)) (TOK_WHERE (< (. (TOK_TABLE_OR_COL s2) key) 10))))) unionsrc1) (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s3)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst1' key) (TOK_SELEXPR (TOK_FUNCTION TOK_STRING (TOK_FUNCTION count 1)) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s4)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s4) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s4) value) value)) (TOK_WHERE (< (. (TOK_TABLE_OR_COL s4) key) 10))))) unionsrc2) (= (. (TOK_TABLE_OR_COL unionsrc1) key) (. (TOK_TABLE_OR_COL unionsrc2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc1) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc1) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc2) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc2) value)))))
@@ -52,7 +62,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/55154034/326347910.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/172458251/1663645008.10002
Union
Reduce Output Operator
key expressions:
@@ -68,7 +78,7 @@
type: string
expr: 1
type: string
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/55154034/326347910.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/172458251/1663645008.10003
Union
Reduce Output Operator
key expressions:
@@ -170,7 +180,7 @@
Stage: Stage-5
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/55154034/326347910.10004
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/172458251/1663645008.10004
Union
File Output Operator
compressed: false
@@ -179,7 +189,7 @@
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
name: binary_table
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/55154034/326347910.10006
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/172458251/1663645008.10006
Union
File Output Operator
compressed: false
@@ -228,6 +238,17 @@
limit: -1
+query: SELECT unionsrc1.key, unionsrc1.value, unionsrc2.key, unionsrc2.value
+FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
+ UNION ALL
+ select s2.key as key, s2.value as value from src s2 where s2.key < 10) unionsrc1
+JOIN
+ (select 'tst1' as key, cast(count(1) as string) as value from src s3
+ UNION ALL
+ select s4.key as key, s4.value as value from src s4 where s4.key < 10) unionsrc2
+ON (unionsrc1.key = unionsrc2.key)
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1381661483/387712250.10000
0 val_0 0 val_0
0 val_0 0 val_0
0 val_0 0 val_0
Index: ql/src/test/results/clientpositive/mapreduce7.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapreduce7.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/mapreduce7.q.out (working copy)
@@ -1,3 +1,10 @@
+query: CREATE TABLE dest1(k STRING, v STRING, key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.*, src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (k, v, tkey, ten, one, tvalue)
+SORT BY tvalue, tkey
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_ALLCOLREF src) (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST k v tkey ten one tvalue)))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tvalue)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tkey)))))
@@ -86,6 +93,16 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.*, src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (k, v, tkey, ten, one, tvalue)
+SORT BY tvalue, tkey
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/211040223/672345730.10000
0 val_0 0 0 0 val_0
0 val_0 0 0 0 val_0
0 val_0 0 0 0 val_0
Index: ql/src/test/results/clientpositive/order2.q.out
===================================================================
--- ql/src/test/results/clientpositive/order2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/order2.q.out (working copy)
@@ -1,3 +1,7 @@
+query: EXPLAIN
+SELECT subq.key, subq.value FROM
+(SELECT x.* FROM SRC x ORDER BY key limit 10) subq
+where subq.key < 10
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_ORDERBY (TOK_TABLE_OR_COL key)) (TOK_LIMIT 10))) subq)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL subq) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL subq) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL subq) key) 10))))
@@ -52,6 +56,11 @@
limit: -1
+query: SELECT subq.key, subq.value FROM
+(SELECT x.* FROM SRC x ORDER BY key limit 10) subq
+where subq.key < 10
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/743790712/1177897584.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/input11_limit.q.out
===================================================================
--- ql/src/test/results/clientpositive/input11_limit.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input11_limit.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)) (TOK_LIMIT 10)))
@@ -58,6 +62,13 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/76746890/763200943.10000
86 val_86
27 val_27
98 val_98
Index: ql/src/test/results/clientpositive/udf_when.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_when.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf_when.q.out (working copy)
@@ -1,3 +1,30 @@
+query: EXPLAIN
+SELECT CASE
+ WHEN 1=1 THEN 2
+ WHEN 1=3 THEN 4
+ ELSE 5
+ END,
+ CASE
+ WHEN 6=7 THEN 8
+ ELSE 9
+ END,
+ CASE
+ WHEN 10=11 THEN 12
+ WHEN 13=13 THEN 14
+ END,
+ CASE
+ WHEN 15=16 THEN 17
+ WHEN 18=19 THEN 20
+ END,
+ CASE
+ WHEN 21=22 THEN NULL
+ WHEN 23=23 THEN 24
+ END,
+ CASE
+ WHEN 25=26 THEN 27
+ WHEN 28=28 THEN NULL
+ END
+FROM src LIMIT 1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION WHEN (= 1 1) 2 (= 1 3) 4 5)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 6 7) 8 9)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 10 11) 12 (= 13 13) 14)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 15 16) 17 (= 18 19) 20)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 21 22) TOK_NULL (= 23 23) 24)) (TOK_SELEXPR (TOK_FUNCTION WHEN (= 25 26) 27 (= 28 28) TOK_NULL))) (TOK_LIMIT 1)))
@@ -38,4 +65,32 @@
limit: 1
+query: SELECT CASE
+ WHEN 1=1 THEN 2
+ WHEN 1=3 THEN 4
+ ELSE 5
+ END,
+ CASE
+ WHEN 6=7 THEN 8
+ ELSE 9
+ END,
+ CASE
+ WHEN 10=11 THEN 12
+ WHEN 13=13 THEN 14
+ END,
+ CASE
+ WHEN 15=16 THEN 17
+ WHEN 18=19 THEN 20
+ END,
+ CASE
+ WHEN 21=22 THEN NULL
+ WHEN 23=23 THEN 24
+ END,
+ CASE
+ WHEN 25=26 THEN 27
+ WHEN 28=28 THEN NULL
+ END
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/262454053/648350622.10000
2 9 14 NULL 24 NULL
Index: ql/src/test/results/clientpositive/groupby4_map_skew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby4_map_skew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby4_map_skew.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE dest1(key INT) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT count(1)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)))))
@@ -54,4 +57,10 @@
name: dest1
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT count(1)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/4535965/295606977.10000
500
Index: ql/src/test/results/clientpositive/join21.q.out
===================================================================
--- ql/src/test/results/clientpositive/join21.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join21.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT * FROM src src1 LEFT OUTER JOIN src src2 ON (src1.key = src2.key AND src1.key < 10 AND src2.key > 10) RIGHT OUTER JOIN src src3 ON (src2.key = src3.key AND src3.key < 10) SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_LEFTOUTERJOIN (TOK_TABREF src src1) (TOK_TABREF src src2) (AND (AND (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)) (< (. (TOK_TABLE_OR_COL src1) key) 10)) (> (. (TOK_TABLE_OR_COL src2) key) 10))) (TOK_TABREF src src3) (AND (= (. (TOK_TABLE_OR_COL src2) key) (. (TOK_TABLE_OR_COL src3) key)) (< (. (TOK_TABLE_OR_COL src3) key) 10)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src1) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src1) value)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src2) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src2) value)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src3) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src3) value)))))
@@ -101,7 +103,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/218419201/103716512.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1131318152/340507767.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -145,6 +147,9 @@
limit: -1
+query: SELECT * FROM src src1 LEFT OUTER JOIN src src2 ON (src1.key = src2.key AND src1.key < 10 AND src2.key > 10) RIGHT OUTER JOIN src src3 ON (src2.key = src3.key AND src3.key < 10) SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/851206556/25821787.10000
NULL NULL NULL NULL 0 val_0
NULL NULL NULL NULL 0 val_0
NULL NULL NULL NULL 0 val_0
Index: ql/src/test/results/clientpositive/ppd_join.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_join.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_join.q.out (working copy)
@@ -1,3 +1,11 @@
+query: EXPLAIN
+SELECT src1.c1, src2.c4
+FROM
+(SELECT src.key as c1, src.value as c2 from src where src.key > '1' ) src1
+JOIN
+(SELECT src.key as c3, src.value as c4 from src where src.key > '2' ) src2
+ON src1.c1 = src2.c3 AND src1.c1 < '400'
+WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4')
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c2)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) '1')))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) '2')))) src2) (AND (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)) (< (. (TOK_TABLE_OR_COL src1) c1) '400')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) c4))) (TOK_WHERE (and (and (and (> (. (TOK_TABLE_OR_COL src1) c1) '20') (or (< (. (TOK_TABLE_OR_COL src1) c2) 'val_50') (> (. (TOK_TABLE_OR_COL src1) c1) '2'))) (or (> (. (TOK_TABLE_OR_COL src2) c3) '50') (< (. (TOK_TABLE_OR_COL src1) c1) '50'))) (<> (. (TOK_TABLE_OR_COL src2) c3) '4')))))
@@ -100,6 +108,15 @@
limit: -1
+query: SELECT src1.c1, src2.c4
+FROM
+(SELECT src.key as c1, src.value as c2 from src where src.key > '1' ) src1
+JOIN
+(SELECT src.key as c3, src.value as c4 from src where src.key > '2' ) src2
+ON src1.c1 = src2.c3 AND src1.c1 < '400'
+WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4')
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/351743825/114158213.10000
200 val_200
200 val_200
200 val_200
Index: ql/src/test/results/clientpositive/ppd_outer_join2.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_outer_join2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_outer_join2.q.out (working copy)
@@ -1,3 +1,11 @@
+query: EXPLAIN
+ FROM
+ src a
+ RIGHT OUTER JOIN
+ src b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25'
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_TABREF src a) (TOK_TABREF src b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) '10') (< (. (TOK_TABLE_OR_COL a) key) '20')) (> (. (TOK_TABLE_OR_COL b) key) '15')) (< (. (TOK_TABLE_OR_COL b) key) '25')))))
@@ -76,6 +84,15 @@
limit: -1
+query: FROM
+ src a
+ RIGHT OUTER JOIN
+ src b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25'
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/705048581/107248741.10000
150 val_150 150 val_150
152 val_152 152 val_152
152 val_152 152 val_152
Index: ql/src/test/results/clientpositive/groupby7_noskew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby7_noskew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby7_noskew.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))))
@@ -79,7 +85,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/81657871/823186010.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1287144918/310129488.10004
Reduce Output Operator
key expressions:
expr: key
@@ -122,6 +128,15 @@
name: dest2
+query: FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+query: SELECT DEST1.* FROM DEST1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/378119846/485228802.10000
0 0.0
10 10.0
100 200.0
@@ -431,6 +446,9 @@
96 96.0
97 194.0
98 196.0
+query: SELECT DEST2.* FROM DEST2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/887469224/901850559.10000
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/sample4.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample4.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/sample4.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 2 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s)))))
@@ -25,7 +29,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/1552595147/428871149.10000.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/554668367/995098198.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -39,14 +43,14 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
Needs Tagging: false
Path -> Alias:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcbucket/kv1.txt
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket/kv1.txt
Path -> Partition:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcbucket/kv1.txt
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket/kv1.txt
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -62,7 +66,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcbucket
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket
@@ -70,7 +74,7 @@
Move Operator
tables:
replace: true
- source: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/1552595147/428871149.10000.insclause-0
+ source: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/554668367/995098198.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -84,11 +88,19 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
+ tmp directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/554668367/995098198.10001
+query: INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s
+Input: default/srcbucket
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/483056409/195284232.10000
238 val_238
86 val_86
311 val_311
Index: ql/src/test/results/clientpositive/inputddl3.q.out
===================================================================
--- ql/src/test/results/clientpositive/inputddl3.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/inputddl3.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
ABSTRACT SYNTAX TREE:
(TOK_CREATETABLE INPUTDDL3 (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEROWFORMAT (TOK_TABLEROWFORMATFIELD '\t')) TOK_TBLTEXTFILE)
@@ -18,5 +20,8 @@
isExternal: false
+query: CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
+query: DESCRIBE INPUTDDL3
key int
value string
+query: DROP TABLE INPUTDDL3
Index: ql/src/test/results/clientpositive/groupby2_map.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby2_map.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby2_map.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))))) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))))
@@ -81,6 +85,13 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/929309782/1021734362.10000
0 1 00.0
1 71 116414.0
2 69 225571.0
Index: ql/src/test/results/clientpositive/sample8.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample8.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/sample8.q.out (working copy)
@@ -1,3 +1,10 @@
+query: EXPLAIN EXTENDED
+SELECT s.*
+FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1 ON key) s
+JOIN srcpart TABLESAMPLE (BUCKET 1 OUT OF 10 ON key) t
+WHERE t.key = s.key and t.value = s.value and s.ds='2008-04-08' and s.hr='11' and s.ds='2008-04-08' and s.hr='11'
+DISTRIBUTE BY key, value
+SORT BY key, value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF srcpart (TOK_TABLESAMPLE 1 1 (TOK_TABLE_OR_COL key)) s) (TOK_TABREF srcpart (TOK_TABLESAMPLE 1 10 (TOK_TABLE_OR_COL key)) t))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_WHERE (and (and (and (and (and (= (. (TOK_TABLE_OR_COL t) key) (. (TOK_TABLE_OR_COL s) key)) (= (. (TOK_TABLE_OR_COL t) value) (. (TOK_TABLE_OR_COL s) value))) (= (. (TOK_TABLE_OR_COL s) ds) '2008-04-08')) (= (. (TOK_TABLE_OR_COL s) hr) '11')) (= (. (TOK_TABLE_OR_COL s) ds) '2008-04-08')) (= (. (TOK_TABLE_OR_COL s) hr) '11'))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)))))
@@ -48,12 +55,12 @@
type: string
Needs Tagging: true
Path -> Alias:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=11/ds=2008-04-08
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=11/ds=2008-04-09
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-09
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
Path -> Partition:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=11/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
partition values:
ds 2008-04-08
@@ -72,10 +79,10 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
partition values:
ds 2008-04-08
@@ -94,10 +101,10 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=11/ds=2008-04-09
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
Partition
partition values:
ds 2008-04-09
@@ -116,10 +123,10 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-09
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
Partition
partition values:
ds 2008-04-09
@@ -138,7 +145,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
Reduce Operator Tree:
@@ -165,7 +172,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/87240990/187310002.10002
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1701195808/498985582.10002
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -178,7 +185,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/87240990/187310002.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1701195808/498985582.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -203,9 +210,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/87240990/187310002.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1701195808/498985582.10002
Path -> Partition:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/87240990/187310002.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1701195808/498985582.10002
Partition
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -220,7 +227,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/96085592.10001.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/535977627.10001.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -233,6 +240,17 @@
limit: -1
+query: SELECT s.key, s.value
+FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1 ON key) s
+JOIN srcpart TABLESAMPLE (BUCKET 1 OUT OF 10 ON key) t
+WHERE s.ds='2008-04-08' and s.hr='11' and s.ds='2008-04-08' and s.hr='11'
+DISTRIBUTE BY key, value
+SORT BY key, value
+Input: default/srcpart/ds=2008-04-08/hr=11
+Input: default/srcpart/ds=2008-04-08/hr=12
+Input: default/srcpart/ds=2008-04-09/hr=11
+Input: default/srcpart/ds=2008-04-09/hr=12
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1694621949/361708294.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/inputddl7.q.out
===================================================================
--- ql/src/test/results/clientpositive/inputddl7.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/inputddl7.q.out (working copy)
@@ -1,18 +1,50 @@
+query: DROP TABLE T1
+query: CREATE TABLE T1(name STRING) STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
+query: SELECT COUNT(1) FROM T1
+Input: default/t1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/15999523/83038071.10000
500
+query: DROP TABLE T2
+query: CREATE TABLE T2(name STRING) STORED AS SEQUENCEFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T2
+query: SELECT COUNT(1) FROM T2
+Input: default/t2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/346849950/36939746.10000
500
+query: DROP TABLE T3
+query: CREATE TABLE T3(name STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T3 PARTITION (ds='2008-04-09')
+query: SELECT COUNT(1) FROM T3 where T3.ds='2008-04-09'
+Input: default/t3/ds=2008-04-09
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1393596681/420008462.10000
500
+query: DROP TABLE T4
+query: CREATE TABLE T4(name STRING) PARTITIONED BY(ds STRING) STORED AS SEQUENCEFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T4 PARTITION (ds='2008-04-09')
+query: SELECT COUNT(1) FROM T4 where T4.ds='2008-04-09'
+Input: default/t4/ds=2008-04-09
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1014261192/72527210.10000
500
+query: DESCRIBE EXTENDED T1
name string
-Detailed Table Information Table(tableName:t1,dbName:default,owner:rmurthy,createTime:1238030311,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:name,type:string,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/t1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+Detailed Table Information Table(tableName:t1,dbName:default,owner:athusoo,createTime:1241278356,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:name,type:string,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/t1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+query: DESCRIBE EXTENDED T2
name string
-Detailed Table Information Table(tableName:t2,dbName:default,owner:rmurthy,createTime:1238030314,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:name,type:string,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/t2,inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat,outputFormat:org.apache.hadoop.mapred.SequenceFileOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+Detailed Table Information Table(tableName:t2,dbName:default,owner:athusoo,createTime:1241278359,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:name,type:string,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/t2,inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+query: DESCRIBE EXTENDED T3 PARTITION (ds='2008-04-09')
name string
ds string
-Detailed Partition Information Partition(values:[2008-04-09],dbName:default,tableName:t3,createTime:0,lastAccessTime:0,sd:StorageDescriptor(cols:[FieldSchema(name:name,type:string,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/t3/ds=2008-04-09,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),parameters:{})
+Detailed Partition Information Partition(values:[2008-04-09],dbName:default,tableName:t3,createTime:0,lastAccessTime:0,sd:StorageDescriptor(cols:[FieldSchema(name:name,type:string,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/t3/ds=2008-04-09,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),parameters:{})
+query: DESCRIBE EXTENDED T4 PARTITION (ds='2008-04-09')
name string
ds string
-Detailed Partition Information Partition(values:[2008-04-09],dbName:default,tableName:t4,createTime:0,lastAccessTime:0,sd:StorageDescriptor(cols:[FieldSchema(name:name,type:string,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/t4/ds=2008-04-09,inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat,outputFormat:org.apache.hadoop.mapred.SequenceFileOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),parameters:{})
+Detailed Partition Information Partition(values:[2008-04-09],dbName:default,tableName:t4,createTime:0,lastAccessTime:0,sd:StorageDescriptor(cols:[FieldSchema(name:name,type:string,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/t4/ds=2008-04-09,inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),parameters:{})
+query: DROP TABLE T1
+query: DROP TABLE T2
+query: DROP TABLE T3
+query: DROP TABLE T4
Index: ql/src/test/results/clientpositive/groupby6_map.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby6_map.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby6_map.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,5,1)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECTDI (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5 1)))))
@@ -58,6 +62,13 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,5,1)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/351658209/252380541.10000
0
1
2
Index: ql/src/test/results/clientpositive/notable_alias1.q.out
===================================================================
--- ql/src/test/results/clientpositive/notable_alias1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/notable_alias1.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(dummy STRING, key INT, value DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT '1234', key, count(1) WHERE src.key < 100 group by key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR '1234') (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)) (TOK_GROUPBY (TOK_TABLE_OR_COL key))))
@@ -81,6 +85,13 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT '1234', key, count(1) WHERE src.key < 100 group by key
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/518865147/140769590.10000
1234 0 3.0
1234 10 1.0
1234 11 1.0
Index: ql/src/test/results/clientpositive/join1.q.out
===================================================================
--- ql/src/test/results/clientpositive/join1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join1.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest_j1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest_j1 SELECT src1.key, src2.value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src src1) (TOK_TABREF src src2) (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest_j1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value)))))
@@ -80,6 +84,13 @@
name: dest_j1
+query: FROM src src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest_j1 SELECT src1.key, src2.value
+Input: default/src
+Output: default/dest_j1
+query: SELECT dest_j1.* FROM dest_j1
+Input: default/dest_j1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/476154546/603289343.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/input2.q.out
===================================================================
--- ql/src/test/results/clientpositive/input2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input2.q.out (working copy)
@@ -1,8 +1,15 @@
+query: DROP TABLE TEST2a
+query: DROP TABLE TEST2b
+query: CREATE TABLE TEST2a(A INT, B DOUBLE) STORED AS TEXTFILE
+query: DESCRIBE TEST2a
a int
b double
+query: CREATE TABLE TEST2b(A ARRAY<INT>, B DOUBLE, C MAP<DOUBLE, INT>) STORED AS TEXTFILE
+query: DESCRIBE TEST2b
a array<int>
b double
c map<double,int>
+query: SHOW TABLES
src
src1
src_json
@@ -12,6 +19,8 @@
srcpart
test2a
test2b
+query: DROP TABLE TEST2a
+query: SHOW TABLES
src
src1
src_json
@@ -20,6 +29,9 @@
srcbucket
srcpart
test2b
+query: DROP TABLE TEST2b
+query: EXPLAIN
+SHOW TABLES
ABSTRACT SYNTAX TREE:
TOK_SHOWTABLES
@@ -37,3 +49,5 @@
limit: -1
+query: DROP TABLE TEST2a
+query: DROP TABLE TEST2b
Index: ql/src/test/results/clientpositive/ppd_multi_insert.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_multi_insert.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_multi_insert.q.out (working copy)
@@ -1,3 +1,15 @@
+query: DROP TABLE mi1
+query: DROP TABLE mi2
+query: DROP TABLE mi3
+query: CREATE TABLE mi1(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE mi2(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE mi3(key INT) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src a JOIN src b ON (a.key = b.key)
+INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100
+INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200
+INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300
+INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src a) (TOK_TABREF src b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB mi1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF a))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL a) key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB mi2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL a) key) 100) (< (. (TOK_TABLE_OR_COL a) key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB mi3 (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL a) key) 200) (< (. (TOK_TABLE_OR_COL a) key) 300)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR '../build/ql/test/data/warehouse/mi4.out')) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (>= (. (TOK_TABLE_OR_COL a) key) 300))))
@@ -160,6 +172,19 @@
name: mi3
+query: FROM src a JOIN src b ON (a.key = b.key)
+INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100
+INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200
+INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300
+INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
+Input: default/src
+Output: default/mi1
+Output: default/mi2
+Output: default/mi3/ds=2008-04-08/hr=12
+Output: ../build/ql/test/data/warehouse/mi4.out
+query: SELECT mi1.* FROM mi1
+Input: default/mi1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/215104815/940509330.10000
0 val_0
0 val_0
0 val_0
@@ -308,6 +333,9 @@
98 val_98
98 val_98
98 val_98
+query: SELECT mi2.* FROM mi2
+Input: default/mi2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/196639737/1385053697.10000
100 val_100
100 val_100
100 val_100
@@ -519,6 +547,9 @@
199 val_199
199 val_199
199 val_199
+query: SELECT mi3.* FROM mi3
+Input: default/mi3/ds=2008-04-08/hr=12
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1010810299/559593156.10000
200 2008-04-08 12
200 2008-04-08 12
200 2008-04-08 12
@@ -1188,3 +1219,6 @@
val_498
val_498
val_498
+query: DROP TABLE mi1
+query: DROP TABLE mi2
+query: DROP TABLE mi3
Index: ql/src/test/results/clientpositive/ppd_gby.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_gby.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_gby.q.out (working copy)
@@ -1,3 +1,8 @@
+query: EXPLAIN
+SELECT src1.c1
+FROM
+(SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1
+WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400')
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c1) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) key)) c2)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) value) 'val_10')) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) value)))) src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1))) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) c1) 'val_200') (or (> (. (TOK_TABLE_OR_COL src1) c2) 30) (< (. (TOK_TABLE_OR_COL src1) c1) 'val_400'))))))
@@ -71,6 +76,12 @@
limit: -1
+query: SELECT src1.c1
+FROM
+(SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1
+WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400')
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/149093602/149899687.10000
val_201
val_202
val_203
Index: ql/src/test/results/clientpositive/ppd_join2.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_join2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_join2.q.out (working copy)
@@ -1,3 +1,14 @@
+query: EXPLAIN
+SELECT src1.c1, src2.c4
+FROM
+(SELECT src.key as c1, src.value as c2 from src where src.key <> '302' ) src1
+JOIN
+(SELECT src.key as c3, src.value as c4 from src where src.key <> '305' ) src2
+ON src1.c1 = src2.c3 AND src1.c1 < '400'
+JOIN
+(SELECT src.key as c5, src.value as c6 from src where src.key <> '306' ) src3
+ON src1.c2 = src3.c6
+WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c2)) (TOK_WHERE (<> (. (TOK_TABLE_OR_COL src) key) '302')))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)) (TOK_WHERE (<> (. (TOK_TABLE_OR_COL src) key) '305')))) src2) (AND (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)) (< (. (TOK_TABLE_OR_COL src1) c1) '400'))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c5) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c6)) (TOK_WHERE (<> (. (TOK_TABLE_OR_COL src) key) '306')))) src3) (= (. (TOK_TABLE_OR_COL src1) c2) (. (TOK_TABLE_OR_COL src3) c6)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) c4))) (TOK_WHERE (and (and (and (and (<> (. (TOK_TABLE_OR_COL src1) c1) '311') (or (<> (. (TOK_TABLE_OR_COL src1) c2) 'val_50') (> (. (TOK_TABLE_OR_COL src1) c1) '1'))) (or (<> (. (TOK_TABLE_OR_COL src2) c3) '10') (<> (. (TOK_TABLE_OR_COL src1) c1) '10'))) (<> (. (TOK_TABLE_OR_COL src2) c3) '14')) (<> (TOK_FUNCTION sqrt (. (TOK_TABLE_OR_COL src3) c5)) 13)))))
@@ -167,6 +178,18 @@
limit: -1
+query: SELECT src1.c1, src2.c4
+FROM
+(SELECT src.key as c1, src.value as c2 from src where src.key <> '302' ) src1
+JOIN
+(SELECT src.key as c3, src.value as c4 from src where src.key <> '305' ) src2
+ON src1.c1 = src2.c3 AND src1.c1 < '400'
+JOIN
+(SELECT src.key as c5, src.value as c6 from src where src.key <> '306' ) src3
+ON src1.c2 = src3.c6
+WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13)
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/44924503/742261563.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/join5.q.out
===================================================================
--- ql/src/test/results/clientpositive/join5.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join5.q.out (working copy)
@@ -1,3 +1,18 @@
+query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ RIGHT OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) c2)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) key) 10) (< (. (TOK_TABLE_OR_COL src1) key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) c4)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src2) key) 15) (< (. (TOK_TABLE_OR_COL src2) key) 25))))) b) (= (. (TOK_TABLE_OR_COL a) c1) (. (TOK_TABLE_OR_COL b) c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c1) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c2) c2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c3) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c3)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c4)))))
@@ -116,6 +131,24 @@
name: dest1
+query: FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ RIGHT OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/229545052/1184877063.10000
17 val_17 17 val_17
18 val_18 18 val_18
18 val_18 18 val_18
Index: ql/src/test/results/clientpositive/describe_xpath.q.out
===================================================================
--- ql/src/test/results/clientpositive/describe_xpath.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/describe_xpath.q.out (working copy)
@@ -1,7 +1,13 @@
+query: describe src_thrift.lint
lint array<int> from deserializer
+query: describe src_thrift.lint.$elem$
$elem$ int from deserializer
+query: describe src_thrift.mStringString.$key$
$key$ string from deserializer
+query: describe src_thrift.mStringString.$value$
$value$ string from deserializer
+query: describe src_thrift.lintString.$elem$
myint int from deserializer
mystring string from deserializer
+query: describe src_thrift.lintString.$elem$.myint
myint int from deserializer
Index: ql/src/test/results/clientpositive/udf_round.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_round.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf_round.q.out (working copy)
@@ -1,5 +1,51 @@
+query: SELECT round(null), round(null, 0), round(125, null)
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/165813041/1303869.10000
NULL NULL NULL
+query: SELECT
+ round(55555), round(55555, 0), round(55555, 1), round(55555, 2), round(55555, 3),
+ round(55555, -1), round(55555, -2), round(55555, -3), round(55555, -4),
+ round(55555, -5), round(55555, -6), round(55555, -7), round(55555, -8)
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/388417828/112328812.10000
55555 55555.0 55555.0 55555.0 55555.0 55560.0 55600.0 56000.0 60000.0 100000.0 0.0 0.0 0.0
+query: SELECT
+ round(125.315), round(125.315, 0),
+ round(125.315, 1), round(125.315, 2), round(125.315, 3), round(125.315, 4),
+ round(125.315, -1), round(125.315, -2), round(125.315, -3), round(125.315, -4),
+ round(-125.315), round(-125.315, 0),
+ round(-125.315, 1), round(-125.315, 2), round(-125.315, 3), round(-125.315, 4),
+ round(-125.315, -1), round(-125.315, -2), round(-125.315, -3), round(-125.315, -4)
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/14344880/300835303.10000
125 125.0 125.3 125.32 125.315 125.315 130.0 100.0 0.0 0.0 -125 -125.0 -125.3 -125.32 -125.315 -125.315 -130.0 -100.0 0.0 0.0
+query: SELECT
+ round(3.141592653589793, -15), round(3.141592653589793, -16),
+ round(3.141592653589793, -13), round(3.141592653589793, -14),
+ round(3.141592653589793, -11), round(3.141592653589793, -12),
+ round(3.141592653589793, -9), round(3.141592653589793, -10),
+ round(3.141592653589793, -7), round(3.141592653589793, -8),
+ round(3.141592653589793, -5), round(3.141592653589793, -6),
+ round(3.141592653589793, -3), round(3.141592653589793, -4),
+ round(3.141592653589793, -1), round(3.141592653589793, -2),
+ round(3.141592653589793, 0), round(3.141592653589793, 1),
+ round(3.141592653589793, 2), round(3.141592653589793, 3),
+ round(3.141592653589793, 4), round(3.141592653589793, 5),
+ round(3.141592653589793, 6), round(3.141592653589793, 7),
+ round(3.141592653589793, 8), round(3.141592653589793, 9),
+ round(3.141592653589793, 10), round(3.141592653589793, 11),
+ round(3.141592653589793, 12), round(3.141592653589793, 13),
+ round(3.141592653589793, 13), round(3.141592653589793, 14),
+ round(3.141592653589793, 15), round(3.141592653589793, 16)
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/243474525/32427733.10000
0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 3.0 3.1 3.14 3.142 3.1416 3.14159 3.141593 3.1415927 3.14159265 3.141592654 3.1415926536 3.14159265359 3.14159265359 3.1415926535898 3.1415926535898 3.14159265358979 3.141592653589793 3.141592653589793
+query: SELECT round(1809242.3151111344, 9), round(-1809242.3151111344, 9)
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/798926241/339405158.10000
1809242.315111134 -1809242.315111134
Index: ql/src/test/results/clientpositive/input6.q.out
===================================================================
--- ql/src/test/results/clientpositive/input6.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input6.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src1.value WHERE src1.key is null
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src1)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value))) (TOK_WHERE (TOK_FUNCTION TOK_ISNULL (. (TOK_TABLE_OR_COL src1) key)))))
@@ -40,3 +44,10 @@
name: dest1
+query: FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src1.value WHERE src1.key is null
+Input: default/src1
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/541854784/3032139.10000
Index: ql/src/test/results/clientpositive/input_testxpath4.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_testxpath4.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_testxpath4.q.out (working copy)
@@ -1,3 +1,9 @@
+query: EXPLAIN
+FROM src_thrift
+SELECT src_thrift.mstringstring['key_9'], lintstring.myint
+WHERE src_thrift.mstringstring['key_9'] IS NOT NULL
+ AND lintstring.myint IS NOT NULL
+ AND lintstring IS NOT NULL
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR ([ (. (TOK_TABLE_OR_COL src_thrift) mstringstring) 'key_9')) (TOK_SELEXPR (. (TOK_TABLE_OR_COL lintstring) myint))) (TOK_WHERE (AND (AND (TOK_FUNCTION TOK_ISNOTNULL ([ (. (TOK_TABLE_OR_COL src_thrift) mstringstring) 'key_9')) (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL lintstring) myint))) (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL lintstring))))))
@@ -38,6 +44,13 @@
limit: -1
+query: FROM src_thrift
+SELECT src_thrift.mstringstring['key_9'], lintstring.myint
+WHERE src_thrift.mstringstring['key_9'] IS NOT NULL
+ OR lintstring.myint IS NOT NULL
+ OR lintstring IS NOT NULL
+Input: default/src_thrift
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/9314654/120165410.10000
NULL [0]
NULL [1]
NULL [4]
Index: ql/src/test/results/clientpositive/join9.q.out
===================================================================
--- ql/src/test/results/clientpositive/join9.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join9.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN EXTENDED
+FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value where src1.ds = '2008-04-08' and src1.hr = '12'
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF srcpart src1) (TOK_TABREF src src2) (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL src1) ds) '2008-04-08') (= (. (TOK_TABLE_OR_COL src1) hr) '12')))))
@@ -51,10 +55,10 @@
type: string
Needs Tagging: true
Path -> Alias:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/src
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Path -> Partition:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -69,10 +73,10 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: src
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
partition values:
ds 2008-04-08
@@ -91,7 +95,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
Reduce Operator Tree:
@@ -120,7 +124,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/795506225.10000.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/93836917.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -134,7 +138,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
@@ -142,7 +146,7 @@
Move Operator
tables:
replace: true
- source: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/795506225.10000.insclause-0
+ source: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/93836917.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -156,11 +160,20 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
+ tmp directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/93836917.10001
+query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value where src1.ds = '2008-04-08' and src1.hr = '12'
+Input: default/src
+Input: default/srcpart/ds=2008-04-08/hr=12
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/280910510/328815293.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/union10.q.out
===================================================================
--- ql/src/test/results/clientpositive/union10.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union10.q.out (working copy)
@@ -1,3 +1,12 @@
+query: drop table tmptable
+query: create table tmptable(key string, value int)
+query: explain
+insert overwrite table tmptable
+ select unionsrc.key, unionsrc.value FROM (select 'tst1' as key, count(1) as value from src s1
+ UNION ALL
+ select 'tst2' as key, count(1) as value from src s2
+ UNION ALL
+ select 'tst3' as key, count(1) as value from src s3) unionsrc
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst1' key) (TOK_SELEXPR (TOK_FUNCTION count 1) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst2' key) (TOK_SELEXPR (TOK_FUNCTION count 1) value))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s3)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst3' key) (TOK_SELEXPR (TOK_FUNCTION count 1) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB tmptable)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) value)))))
@@ -50,7 +59,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/10544987/1052675229.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/47954573/182017310.10002
Union
Select Operator
expressions:
@@ -72,7 +81,7 @@
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: tmptable
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/10544987/1052675229.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/47954573/182017310.10003
Union
Select Operator
expressions:
@@ -94,7 +103,7 @@
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: tmptable
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/10544987/1052675229.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/47954573/182017310.10004
Union
Select Operator
expressions:
@@ -196,6 +205,18 @@
name: binary_table
+query: insert overwrite table tmptable
+ select unionsrc.key, unionsrc.value FROM (select 'tst1' as key, count(1) as value from src s1
+ UNION ALL
+ select 'tst2' as key, count(1) as value from src s2
+ UNION ALL
+ select 'tst3' as key, count(1) as value from src s3) unionsrc
+Input: default/src
+Output: default/tmptable
+query: select * from tmptable x sort by x.key
+Input: default/tmptable
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/230110516/89280832.10000
tst1 500
tst2 500
tst3 500
+query: drop table tmptable
Index: ql/src/test/results/clientpositive/udf_case_thrift.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_case_thrift.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf_case_thrift.q.out (working copy)
@@ -1,3 +1,20 @@
+query: EXPLAIN
+SELECT CASE src_thrift.lint[0]
+ WHEN 0 THEN src_thrift.lint[0] + 1
+ WHEN 1 THEN src_thrift.lint[0] + 2
+ WHEN 2 THEN 100
+ ELSE 5
+ END,
+ CASE src_thrift.lstring[0]
+ WHEN '0' THEN 'zero'
+ WHEN '10' THEN CONCAT(src_thrift.lstring[0], " is ten")
+ ELSE 'default'
+ END,
+ (CASE src_thrift.lstring[0]
+ WHEN '0' THEN src_thrift.lstring
+ ELSE NULL
+ END)[0]
+FROM src_thrift LIMIT 3
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION CASE ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0) 0 (+ ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0) 1) 1 (+ ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0) 2) 2 100 5)) (TOK_SELEXPR (TOK_FUNCTION CASE ([ (. (TOK_TABLE_OR_COL src_thrift) lstring) 0) '0' 'zero' '10' (TOK_FUNCTION CONCAT ([ (. (TOK_TABLE_OR_COL src_thrift) lstring) 0) " is ten") 'default')) (TOK_SELEXPR ([ (TOK_FUNCTION CASE ([ (. (TOK_TABLE_OR_COL src_thrift) lstring) 0) '0' (. (TOK_TABLE_OR_COL src_thrift) lstring) TOK_NULL) 0))) (TOK_LIMIT 3)))
@@ -37,6 +54,24 @@
limit: 3
+query: SELECT CASE src_thrift.lint[0]
+ WHEN 0 THEN src_thrift.lint[0] + 1
+ WHEN 1 THEN src_thrift.lint[0] + 2
+ WHEN 2 THEN 100
+ ELSE 5
+ END,
+ CASE src_thrift.lstring[0]
+ WHEN '0' THEN 'zero'
+ WHEN '10' THEN CONCAT(src_thrift.lstring[0], " is ten")
+ ELSE 'default'
+ END,
+ (CASE src_thrift.lstring[0]
+ WHEN '0' THEN src_thrift.lstring
+ ELSE NULL
+ END)[0]
+FROM src_thrift LIMIT 3
+Input: default/src_thrift
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1401002108/1327666619.10000
1 zero 0
3 10 is ten NULL
100 default NULL
Index: ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
===================================================================
--- ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (working copy)
@@ -1,3 +1,8 @@
+query: drop table tmptable
+query: create table tmptable(key string, value string, hr string, ds string)
+query: explain extended
+insert overwrite table tmptable
+select a.* from srcpart a where rand(1) < 0.1 and a.ds = '2008-04-08'
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF srcpart a)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB tmptable)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF a))) (TOK_WHERE (and (< (TOK_FUNCTION rand 1) 0.1) (= (. (TOK_TABLE_OR_COL a) ds) '2008-04-08')))))
@@ -27,7 +32,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/433273605/149405965.10000.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/10675457/692984549.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -41,15 +46,15 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/tmptable
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/tmptable
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: tmptable
Needs Tagging: false
Path -> Alias:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=11/ds=2008-04-08
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Path -> Partition:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=11/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
partition values:
ds 2008-04-08
@@ -68,10 +73,10 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
partition values:
ds 2008-04-08
@@ -90,7 +95,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
@@ -98,7 +103,7 @@
Move Operator
tables:
replace: true
- source: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/433273605/149405965.10000.insclause-0
+ source: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/10675457/692984549.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -112,11 +117,20 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/tmptable
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/tmptable
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: tmptable
+ tmp directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/10675457/692984549.10001
+query: insert overwrite table tmptable
+select a.* from srcpart a where rand(1) < 0.1 and a.ds = '2008-04-08'
+Input: default/srcpart/ds=2008-04-08/hr=11
+Input: default/srcpart/ds=2008-04-08/hr=12
+Output: default/tmptable
+query: select * from tmptable x sort by x.key
+Input: default/tmptable
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/215840330/359161528.10000
103 val_103 2008-04-08 11
103 val_103 2008-04-08 12
133 val_133 2008-04-08 12
@@ -229,3 +243,4 @@
77 val_77 2008-04-08 11
78 val_78 2008-04-08 11
78 val_78 2008-04-08 12
+query: drop table tmptable
Index: ql/src/test/results/clientpositive/fileformat_text.q.out
===================================================================
--- ql/src/test/results/clientpositive/fileformat_text.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/fileformat_text.q.out (working copy)
@@ -1,3 +1,7 @@
+query: EXPLAIN
+CREATE TABLE dest1(key INT, value STRING) STORED AS
+ INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
+ OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
ABSTRACT SYNTAX TREE:
(TOK_CREATETABLE dest1 (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEFILEFORMAT 'org.apache.hadoop.mapred.TextInputFormat' 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'))
@@ -17,10 +21,21 @@
isExternal: false
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS
+ INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
+ OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
+query: DESCRIBE EXTENDED dest1
key int
value string
-Detailed Table Information Table(tableName:dest1,dbName:default,owner:rmurthy,createTime:1238029992,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:int,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/dest1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+Detailed Table Information Table(tableName:dest1,dbName:default,owner:athusoo,createTime:1241277729,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:int,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 10
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/147130074/156865283.10000
0 val_0
4 val_4
8 val_8
@@ -31,3 +46,4 @@
2 val_2
5 val_5
9 val_9
+query: DROP TABLE dest1
Index: ql/src/test/results/clientpositive/union14.q.out
===================================================================
--- ql/src/test/results/clientpositive/union14.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union14.q.out (working copy)
@@ -1,3 +1,8 @@
+query: explain
+ select unionsrc.key, count(1) FROM (select s2.key as key, s2.value as value from src1 s2
+ UNION ALL
+ select 'tst1' as key, cast(count(1) as string) as value from src s1)
+ unionsrc group by unionsrc.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src1 s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) value) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst1' key) (TOK_SELEXPR (TOK_FUNCTION TOK_STRING (TOK_FUNCTION count 1)) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL unionsrc) key))))
@@ -30,7 +35,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/8141918/13495910.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/408002969/2251170.10002
Union
Group By Operator
aggregations:
@@ -51,7 +56,7 @@
value expressions:
expr: 1
type: bigint
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/8141918/13495910.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/408002969/2251170.10003
Union
Group By Operator
aggregations:
@@ -132,6 +137,13 @@
limit: -1
+query: select unionsrc.key, count(1) FROM (select s2.key as key, s2.value as value from src1 s2
+ UNION ALL
+ select 'tst1' as key, cast(count(1) as string) as value from src s1)
+ unionsrc group by unionsrc.key
+Input: default/src1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/22399937/103465306.10000
10
128 1
146 1
Index: ql/src/test/results/clientpositive/groupby3_map_skew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby3_map_skew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby3_map_skew.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT sum(substr(src.value,5)), avg(substr(src.value,5)), avg(DISTINCT substr(src.value,5)), max(substr(src.value,5)), min(substr(src.value,5))
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION avg (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTIONDI avg (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION max (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION min (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))))))
@@ -66,7 +70,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/264382022/880212150.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/97967328/397511110.10002
Reduce Output Operator
sort order:
tag: -1
@@ -134,4 +138,11 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT sum(substr(src.value,5)), avg(substr(src.value,5)), avg(DISTINCT substr(src.value,5)), max(substr(src.value,5)), min(substr(src.value,5))
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/200485216/757320674.10000
130091.0 260.182 256.10355987055016 98.0 0.0
Index: ql/src/test/results/clientpositive/binarysortable_1.q.out
===================================================================
--- ql/src/test/results/clientpositive/binarysortable_1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/binarysortable_1.q.out (working copy)
@@ -1,3 +1,15 @@
+query: CREATE TABLE mytable(key STRING, value STRING)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY '9'
+STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/string.txt' INTO TABLE mytable
+query: EXPLAIN
+SELECT REGEXP_REPLACE(REGEXP_REPLACE(REGEXP_REPLACE(key, '\001', '^A'), '\0', '^@'), '\002', '^B'), value
+FROM (
+ SELECT key, sum(value) as value
+ FROM mytable
+ GROUP BY key
+) a
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF mytable)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_TABLE_OR_COL value)) value)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION REGEXP_REPLACE (TOK_FUNCTION REGEXP_REPLACE (TOK_FUNCTION REGEXP_REPLACE (TOK_TABLE_OR_COL key) '\001' '^A') '\0' '^@') '\002' '^B')) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))
@@ -61,6 +73,14 @@
limit: -1
+query: SELECT REGEXP_REPLACE(REGEXP_REPLACE(REGEXP_REPLACE(key, '\001', '^A'), '\0', '^@'), '\002', '^B'), value
+FROM (
+ SELECT key, sum(value) as value
+ FROM mytable
+ GROUP BY key
+) a
+Input: default/mytable
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/263075505/985426198.10000
^@^@^@ 7.0
^@^A^@ 9.0
^@test^@ 2.0
@@ -71,3 +91,4 @@
test^@^@^A^Atest 6.0
test^@test 4.0
test^Atest 5.0
+query: DROP TABLE mytable
Index: ql/src/test/results/clientpositive/join11.q.out
===================================================================
--- ql/src/test/results/clientpositive/join11.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join11.q.out (working copy)
@@ -1,3 +1,10 @@
+query: EXPLAIN
+SELECT src1.c1, src2.c4
+FROM
+(SELECT src.key as c1, src.value as c2 from src) src1
+JOIN
+(SELECT src.key as c3, src.value as c4 from src) src2
+ON src1.c1 = src2.c3 AND src1.c1 < 100
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c2)))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)))) src2) (AND (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)) (< (. (TOK_TABLE_OR_COL src1) c1) 100)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) c4)))))
@@ -80,6 +87,14 @@
limit: -1
+query: SELECT src1.c1, src2.c4
+FROM
+(SELECT src.key as c1, src.value as c2 from src) src1
+JOIN
+(SELECT src.key as c3, src.value as c4 from src) src2
+ON src1.c1 = src2.c3 AND src1.c1 < 100
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/237431026/1499570597.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/groupby4_noskew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby4_noskew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby4_noskew.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,1,1) GROUP BY substr(src.key,1,1)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))))
@@ -53,6 +57,13 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,1,1) GROUP BY substr(src.key,1,1)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/233964547/872104797.10000
0
1
2
Index: ql/src/test/results/clientpositive/union18.q.out
===================================================================
--- ql/src/test/results/clientpositive/union18.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union18.q.out (working copy)
@@ -1,3 +1,13 @@
+query: drop table DEST1
+query: drop table DEST2
+query: CREATE TABLE DEST1(key STRING, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE DEST2(key STRING, val1 STRING, val2 STRING) STORED AS TEXTFILE
+query: explain
+FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
+ UNION ALL
+ select s2.key as key, s2.value as value from src s2) unionsrc
+INSERT OVERWRITE TABLE DEST1 SELECT unionsrc.key, unionsrc.value
+INSERT OVERWRITE TABLE DEST2 SELECT unionsrc.key, unionsrc.value, unionsrc.value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst1' key) (TOK_SELEXPR (TOK_FUNCTION TOK_STRING (TOK_FUNCTION count 1)) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) value) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) value)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) value)))))
@@ -47,7 +57,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/athusoo/commits/hive_trunk_ws7/build/ql/tmp/164991120/495766648.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/926628627/699992620.10004
Union
Select Operator
expressions:
@@ -79,7 +89,7 @@
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest2
- /data/users/athusoo/commits/hive_trunk_ws7/build/ql/tmp/164991120/495766648.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/926628627/699992620.10005
Union
Select Operator
expressions:
@@ -147,6 +157,17 @@
name: binary_table
+query: FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
+ UNION ALL
+ select s2.key as key, s2.value as value from src s2) unionsrc
+INSERT OVERWRITE TABLE DEST1 SELECT unionsrc.key, unionsrc.value
+INSERT OVERWRITE TABLE DEST2 SELECT unionsrc.key, unionsrc.value, unionsrc.value
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+query: SELECT DEST1.* FROM DEST1 SORT BY DEST1.key, DEST1.value
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/347590244/841189198.10000
0 val_0
0 val_0
0 val_0
@@ -648,6 +669,9 @@
98 val_98
98 val_98
tst1 500
+query: SELECT DEST2.* FROM DEST2 SORT BY DEST2.key, DEST2.val1, DEST2.val2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/162660494/315114355.10000
0 val_0 val_0
0 val_0 val_0
0 val_0 val_0
@@ -1149,3 +1173,5 @@
98 val_98 val_98
98 val_98 val_98
tst1 500 500
+query: drop table DEST1
+query: drop table DEST2
Index: ql/src/test/results/clientpositive/udf_testlength2.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_testlength2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf_testlength2.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+CREATE TEMPORARY FUNCTION testlength2 AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength2'
ABSTRACT SYNTAX TREE:
(TOK_CREATEFUNCTION testlength2 'org.apache.hadoop.hive.ql.udf.UDFTestLength2')
@@ -8,6 +10,14 @@
Stage: Stage-0
+query: CREATE TEMPORARY FUNCTION testlength2 AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength2'
+query: CREATE TABLE dest1(len INT)
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT testlength2(src.value)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/738511694/1056230246.10000
7
6
7
Index: ql/src/test/results/clientpositive/input12.q.out
===================================================================
--- ql/src/test/results/clientpositive/input12.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input12.q.out (working copy)
@@ -1,3 +1,11 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE dest3(key INT) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL src) key) 100) (< (. (TOK_TABLE_OR_COL src) key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest3 (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key))) (TOK_WHERE (>= (. (TOK_TABLE_OR_COL src) key) 200))))
@@ -105,6 +113,17 @@
name: dest3
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+Output: default/dest3/ds=2008-04-08/hr=12
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1596690660/1217911700.10000
86 val_86
27 val_27
98 val_98
@@ -189,6 +208,9 @@
37 val_37
90 val_90
97 val_97
+query: SELECT dest2.* FROM dest2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/28573739/190647993.10000
165 val_165
193 val_193
150 val_150
@@ -294,6 +316,9 @@
194 val_194
126 val_126
169 val_169
+query: SELECT dest3.* FROM dest3
+Input: default/dest3/ds=2008-04-08/hr=12
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/135776438/188412986.10000
238 2008-04-08 12
311 2008-04-08 12
409 2008-04-08 12
Index: ql/src/test/results/clientpositive/udf4.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf4.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf4.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT ' abc ' WHERE src.key = 86
+Input: default/src
+Output: default/dest1
+query: EXPLAIN
+SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), sqrt(1.0), sqrt(-1.0), sqrt(0.0), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION round 1.0)) (TOK_SELEXPR (TOK_FUNCTION round 1.5)) (TOK_SELEXPR (TOK_FUNCTION round (- 1.5))) (TOK_SELEXPR (TOK_FUNCTION floor 1.0)) (TOK_SELEXPR (TOK_FUNCTION floor 1.5)) (TOK_SELEXPR (TOK_FUNCTION floor (- 1.5))) (TOK_SELEXPR (TOK_FUNCTION sqrt 1.0)) (TOK_SELEXPR (TOK_FUNCTION sqrt (- 1.0))) (TOK_SELEXPR (TOK_FUNCTION sqrt 0.0)) (TOK_SELEXPR (TOK_FUNCTION ceil 1.0)) (TOK_SELEXPR (TOK_FUNCTION ceil 1.5)) (TOK_SELEXPR (TOK_FUNCTION ceil (- 1.5))) (TOK_SELEXPR (TOK_FUNCTION ceiling 1.0)) (TOK_SELEXPR (TOK_FUNCTION rand 3)) (TOK_SELEXPR (+ 3)) (TOK_SELEXPR (- 3)) (TOK_SELEXPR (+ 1 (+ 2))) (TOK_SELEXPR (+ 1 (- 2))) (TOK_SELEXPR (~ 1)))))
@@ -63,4 +69,7 @@
limit: -1
+query: SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), sqrt(1.0), sqrt(-1.0), sqrt(0.0), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/29787657/1016730635.10000
1 2 -2 1 1 -2 1.0 NULL 0.0 1 2 -1 1 0.731057369148862 3 -3 3 -1 -2
Index: ql/src/test/results/clientpositive/join15.q.out
===================================================================
--- ql/src/test/results/clientpositive/join15.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join15.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key) SORT BY src1.key, src1.value, src2.key, src2.value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src src1) (TOK_TABREF src src2) (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src1) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src1) value)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src2) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src2) value)))))
@@ -68,7 +70,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/athusoo/commits/hive_trunk_ws6/build/ql/tmp/340146088/929907680.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/360004640/200291703.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -104,6 +106,9 @@
limit: -1
+query: SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key) SORT BY src1.key, src1.value, src2.key, src2.value
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1167397211/320838370.10000
0 val_0 0 val_0
0 val_0 0 val_0
0 val_0 0 val_0
Index: ql/src/test/results/clientpositive/input16.q.out
===================================================================
--- ql/src/test/results/clientpositive/input16.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input16.q.out (working copy)
@@ -1,3 +1,9 @@
+query: DROP TABLE INPUT16
+query: CREATE TABLE INPUT16(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT16
+query: SELECT INPUT16.VALUE, INPUT16.KEY FROM INPUT16
+Input: default/input16
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/62593409/1607071713.10000
val_238 238
val_86 86
val_311 311
@@ -498,3 +504,4 @@
val_400 400
val_200 200
val_97 97
+query: DROP TABLE INPUT16
Index: ql/src/test/results/clientpositive/udf8.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf8.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf8.q.out (working copy)
@@ -1,3 +1,12 @@
+query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT '' WHERE src.key = 86
+Input: default/src
+Output: default/dest1
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT '1' WHERE src.key = 86
+Input: default/src
+Output: default/dest1
+query: EXPLAIN
+SELECT avg(c1), sum(c1), count(c1) FROM dest1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION avg (TOK_TABLE_OR_COL c1))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_TABLE_OR_COL c1))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL c1))))))
@@ -53,4 +62,7 @@
limit: -1
+query: SELECT avg(c1), sum(c1), count(c1) FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/148346145/763466676.10000
1.0 1.0 1
Index: ql/src/test/results/clientpositive/udf_json.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_json.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf_json.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT ' abc ' WHERE src.key = 86
+Input: default/src
+Output: default/dest1
+query: EXPLAIN
+SELECT get_json_object(src_json.json, '$.owner') FROM src_json
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src_json)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION get_json_object (. (TOK_TABLE_OR_COL src_json) json) '$.owner')))))
@@ -26,11 +32,35 @@
limit: -1
+query: SELECT get_json_object(src_json.json, '$') FROM src_json
+Input: default/src_json
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1066710159/435699718.10000
{"store":{"fruit":[{"weight":8,"type":"apple"},{"weight":9,"type":"pear"}],"book":[{"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95},{"author":"Herman Melville","category":"fiction","title":"Moby Dick","price":8.99,"isbn":"0-553-21311-3"},{"author":"J. R. R. Tolkien","category":"fiction","title":"The Lord of the Rings","price":22.99,"reader":[{"name":"bob","age":25},{"name":"jack","age":26}],"isbn":"0-395-19395-8"}],"basket":[[1,2,{"b":"y","a":"x"}],[3,4],[5,6]],"bicycle":{"price":19.95,"color":"red"}},"email":"amy@only_for_json_udf_test.net","owner":"amy"}
+query: SELECT get_json_object(src_json.json, '$.owner'), get_json_object(src_json.json, '$.store') FROM src_json
+Input: default/src_json
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/31197388/1153397495.10000
amy {"fruit":[{"weight":8,"type":"apple"},{"weight":9,"type":"pear"}],"book":[{"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95},{"author":"Herman Melville","category":"fiction","title":"Moby Dick","price":8.99,"isbn":"0-553-21311-3"},{"author":"J. R. R. Tolkien","category":"fiction","title":"The Lord of the Rings","price":22.99,"reader":[{"name":"bob","age":25},{"name":"jack","age":26}],"isbn":"0-395-19395-8"}],"basket":[[1,2,{"b":"y","a":"x"}],[3,4],[5,6]],"bicycle":{"price":19.95,"color":"red"}}
+query: SELECT get_json_object(src_json.json, '$.store.bicycle'), get_json_object(src_json.json, '$.store.book') FROM src_json
+Input: default/src_json
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/162773739/733079622.10000
{"price":19.95,"color":"red"} [{"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95},{"author":"Herman Melville","category":"fiction","title":"Moby Dick","price":8.99,"isbn":"0-553-21311-3"},{"author":"J. R. R. Tolkien","category":"fiction","title":"The Lord of the Rings","price":22.99,"reader":[{"name":"bob","age":25},{"name":"jack","age":26}],"isbn":"0-395-19395-8"}]
+query: SELECT get_json_object(src_json.json, '$.store.book[0]'), get_json_object(src_json.json, '$.store.book[*]') FROM src_json
+Input: default/src_json
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/633718736/911502089.10000
{"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95} [{"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95},{"author":"Herman Melville","category":"fiction","title":"Moby Dick","price":8.99,"isbn":"0-553-21311-3"},{"author":"J. R. R. Tolkien","category":"fiction","title":"The Lord of the Rings","price":22.99,"reader":[{"name":"bob","age":25},{"name":"jack","age":26}],"isbn":"0-395-19395-8"}]
+query: SELECT get_json_object(src_json.json, '$.store.book[0].category'), get_json_object(src_json.json, '$.store.book[*].category'), get_json_object(src_json.json, '$.store.book[*].isbn'), get_json_object(src_json.json, '$.store.book[*].reader') FROM src_json
+Input: default/src_json
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/90955553/557294128.10000
reference ["reference","fiction","fiction"] ["0-553-21311-3","0-395-19395-8"] [{"name":"bob","age":25},{"name":"jack","age":26}]
+query: SELECT get_json_object(src_json.json, '$.store.book[*].reader[0].age'), get_json_object(src_json.json, '$.store.book[*].reader[*].age') FROM src_json
+Input: default/src_json
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/263152246/150130665.10000
25 [25,26]
+query: SELECT get_json_object(src_json.json, '$.store.basket[0][1]'), get_json_object(src_json.json, '$.store.basket[*]'), get_json_object(src_json.json, '$.store.basket[*][0]'), get_json_object(src_json.json, '$.store.basket[0][*]'), get_json_object(src_json.json, '$.store.basket[*][*]'), get_json_object(src_json.json, '$.store.basket[0][2].b'), get_json_object(src_json.json, '$.store.basket[0][*].b') FROM src_json
+Input: default/src_json
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/488276435/564751935.10000
2 [[1,2,{"b":"y","a":"x"}],[3,4],[5,6]] 1 [1,2,{"b":"y","a":"x"}] [1,2,{"b":"y","a":"x"},3,4,5,6] y ["y"]
+query: SELECT get_json_object(src_json.json, '$.non_exist_key'), get_json_object(src_json.json, '$..no_recursive'), get_json_object(src_json.json, '$.store.book[10]'), get_json_object(src_json.json, '$.store.book[0].non_exist_key'), get_json_object(src_json.json, '$.store.basket[*].non_exist_key'), get_json_object(src_json.json, '$.store.basket[0][*].non_exist_key') FROM src_json
+Input: default/src_json
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/5749808/162815016.10000
NULL NULL NULL NULL NULL NULL
Index: ql/src/test/results/clientpositive/input_part3.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part3.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_part3.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08' and x.hr = 11
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL x) ds) '2008-04-08') (= (. (TOK_TABLE_OR_COL x) hr) 11)))))
@@ -10,6 +12,9 @@
limit: -1
+query: SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08' and x.hr = 11
+Input: default/srcpart/ds=2008-04-08/hr=11
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/196507728/1475419403.10000
238 val_238 2008-04-08 11
86 val_86 2008-04-08 11
311 val_311 2008-04-08 11
Index: ql/src/test/results/clientpositive/join19.q.out
===================================================================
--- ql/src/test/results/clientpositive/join19.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join19.q.out (working copy)
@@ -1,3 +1,60 @@
+query: drop TABLE triples
+query: CREATE TABLE triples (foo string, subject string, predicate string, object string, foo2 string)
+query: EXPLAIN
+SELECT t11.subject, t22.object , t33.subject , t55.object, t66.object
+FROM
+(
+SELECT t1.subject
+FROM triples t1
+WHERE
+t1.predicate='http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL'
+AND
+t1.object='http://ontos/OntosMiner/Common.English/ontology#Citation'
+) t11
+JOIN
+(
+SELECT t2.subject , t2.object
+FROM triples t2
+WHERE
+t2.predicate='http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL'
+) t22
+ON (t11.subject=t22.subject)
+JOIN
+(
+SELECT t3.subject , t3.object
+FROM triples t3
+WHERE
+t3.predicate='http://www.ontosearch.com/2007/12/ontosofa-ns#_from'
+
+) t33
+ON (t11.subject=t33.object)
+JOIN
+(
+SELECT t4.subject
+FROM triples t4
+WHERE
+t4.predicate='http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL'
+AND
+t4.object='http://ontos/OntosMiner/Common.English/ontology#Author'
+
+) t44
+ON (t44.subject=t33.subject)
+JOIN
+(
+SELECT t5.subject, t5.object
+FROM triples t5
+WHERE
+t5.predicate='http://www.ontosearch.com/2007/12/ontosofa-ns#_to'
+) t55
+ON (t55.subject=t44.subject)
+JOIN
+(
+SELECT t6.subject, t6.object
+FROM triples t6
+WHERE
+t6.predicate='http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL'
+) t66
+ON (t66.subject=t55.object)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF triples t1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL t1) subject))) (TOK_WHERE (AND (= (. (TOK_TABLE_OR_COL t1) predicate) 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') (= (. (TOK_TABLE_OR_COL t1) object) 'http://ontos/OntosMiner/Common.English/ontology#Citation'))))) t11) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF triples t2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL t2) subject)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL t2) object))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL t2) predicate) 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL')))) t22) (= (. (TOK_TABLE_OR_COL t11) subject) (. (TOK_TABLE_OR_COL t22) subject))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF triples t3)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL t3) subject)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL t3) object))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL t3) predicate) 'http://www.ontosearch.com/2007/12/ontosofa-ns#_from')))) t33) (= (. (TOK_TABLE_OR_COL t11) subject) (. (TOK_TABLE_OR_COL t33) object))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF triples t4)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL t4) subject))) (TOK_WHERE (AND (= (. (TOK_TABLE_OR_COL t4) predicate) 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') (= (. (TOK_TABLE_OR_COL t4) object) 'http://ontos/OntosMiner/Common.English/ontology#Author'))))) t44) (= (. (TOK_TABLE_OR_COL t44) subject) (. (TOK_TABLE_OR_COL t33) subject))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF triples t5)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL t5) subject)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL t5) object))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL t5) predicate) 'http://www.ontosearch.com/2007/12/ontosofa-ns#_to')))) t55) (= (. (TOK_TABLE_OR_COL t55) subject) (. (TOK_TABLE_OR_COL t44) subject))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF triples t6)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL t6) subject)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL t6) object))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL t6) predicate) 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL')))) t66) (= (. (TOK_TABLE_OR_COL t66) subject) (. (TOK_TABLE_OR_COL t55) object)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL t11) subject)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL t22) object)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL t33) subject)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL t55) object)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL t66) object)))))
@@ -320,3 +377,4 @@
limit: -1
+query: drop TABLE triples
Index: ql/src/test/results/clientpositive/show_tables.q.out
===================================================================
--- ql/src/test/results/clientpositive/show_tables.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/show_tables.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE shtb_test1(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE
+query: CREATE TABLE shtb_test2(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE
+query: EXPLAIN
+SHOW TABLES 'shtb_*'
ABSTRACT SYNTAX TREE:
(TOK_SHOWTABLES 'shtb_*')
@@ -16,8 +20,11 @@
limit: -1
+query: SHOW TABLES 'shtb_*'
shtb_test1
shtb_test2
+query: EXPLAIN
+SHOW TABLES 'shtb_test1|shtb_test2'
ABSTRACT SYNTAX TREE:
(TOK_SHOWTABLES 'shtb_test1|shtb_test2')
@@ -36,5 +43,8 @@
limit: -1
+query: SHOW TABLES 'shtb_test1|shtb_test2'
shtb_test1
shtb_test2
+query: DROP TABLE shtb_test1
+query: DROP TABLE shtb_test2
Index: ql/src/test/results/clientpositive/union3.q.out
===================================================================
--- ql/src/test/results/clientpositive/union3.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union3.q.out (working copy)
@@ -1,3 +1,20 @@
+query: explain
+SELECT *
+FROM (
+ SELECT 1 AS id
+ FROM (SELECT * FROM src LIMIT 1) s1
+ CLUSTER BY id
+ UNION ALL
+ SELECT 2 AS id
+ FROM (SELECT * FROM src LIMIT 1) s1
+ CLUSTER BY id
+ UNION ALL
+ SELECT 3 AS id
+ FROM (SELECT * FROM src LIMIT 1) s2
+ UNION ALL
+ SELECT 4 AS id
+ FROM (SELECT * FROM src LIMIT 1) s2
+) a
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_UNION (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 1))) s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 1 id)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL id)))) (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 1))) s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 2 id)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL id))))) (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 1))) s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 3 id))))) (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 1))) s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 4 id))))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
@@ -52,7 +69,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/281579407/349315994.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/101291857/595083252.10002
Union
Select Operator
expressions:
@@ -64,7 +81,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/281579407/349315994.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/101291857/595083252.10003
Union
Select Operator
expressions:
@@ -76,7 +93,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/281579407/349315994.10005
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/101291857/595083252.10005
Union
Select Operator
expressions:
@@ -88,7 +105,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/281579407/349315994.10007
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/101291857/595083252.10007
Union
Select Operator
expressions:
@@ -172,7 +189,7 @@
Stage: Stage-5
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/281579407/349315994.10004
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/101291857/595083252.10004
Reduce Output Operator
key expressions:
expr: 0
@@ -232,7 +249,7 @@
Stage: Stage-7
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/281579407/349315994.10006
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/101291857/595083252.10006
Reduce Output Operator
key expressions:
expr: 0
@@ -260,6 +277,30 @@
limit: -1
+query: DROP TABLE union_out
+query: CREATE TABLE union_out (id int)
+query: insert overwrite table union_out
+SELECT *
+FROM (
+ SELECT 1 AS id
+ FROM (SELECT * FROM src LIMIT 1) s1
+ CLUSTER BY id
+ UNION ALL
+ SELECT 2 AS id
+ FROM (SELECT * FROM src LIMIT 1) s1
+ CLUSTER BY id
+ UNION ALL
+ SELECT 3 AS id
+ FROM (SELECT * FROM src LIMIT 1) s2
+ UNION ALL
+ SELECT 4 AS id
+ FROM (SELECT * FROM src LIMIT 1) s2
+) a
+Input: default/src
+Output: default/union_out
+query: select * from union_out cluster by id
+Input: default/union_out
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/203301579/334457117.10000
1
2
3
Index: ql/src/test/results/clientpositive/groupby4.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby4.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby4.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,1,1) GROUP BY substr(src.key,1,1)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))))
@@ -41,7 +45,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/577453870/133273038.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/155743537/194052245.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -81,6 +85,13 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,1,1) GROUP BY substr(src.key,1,1)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/88502906/569100809.10000
0
1
2
Index: ql/src/test/results/clientpositive/input_part7.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part7.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_part7.q.out (working copy)
@@ -1,3 +1,10 @@
+query: EXPLAIN EXTENDED
+SELECT * FROM (
+ SELECT X.* FROM SRCPART X WHERE X.ds = '2008-04-08' and X.key < 100
+ UNION ALL
+ SELECT Y.* FROM SRCPART Y WHERE Y.ds = '2008-04-08' and Y.key < 100
+) A
+SORT BY A.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART X)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF X))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL X) ds) '2008-04-08') (< (. (TOK_TABLE_OR_COL X) key) 100))))) (TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART Y)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF Y))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL Y) ds) '2008-04-08') (< (. (TOK_TABLE_OR_COL Y) key) 100)))))) A)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL A) key)))))
@@ -93,10 +100,10 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=11/ds=2008-04-08
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Path -> Partition:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=11/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
partition values:
ds 2008-04-08
@@ -115,10 +122,10 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
partition values:
ds 2008-04-08
@@ -137,7 +144,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
Reduce Operator Tree:
@@ -145,7 +152,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/286749465/113841220.10001.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1008455822/140279174.10001.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -158,6 +165,15 @@
limit: -1
+query: SELECT * FROM (
+ SELECT X.* FROM SRCPART X WHERE X.ds = '2008-04-08' and X.key < 100
+ UNION ALL
+ SELECT Y.* FROM SRCPART Y WHERE Y.ds = '2008-04-08' and Y.key < 100
+) A
+SORT BY A.key
+Input: default/srcpart/ds=2008-04-08/hr=11
+Input: default/srcpart/ds=2008-04-08/hr=12
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/293202813/2015995.10000
0 val_0 2008-04-08 11
0 val_0 2008-04-08 11
0 val_0 2008-04-08 11
Index: ql/src/test/results/clientpositive/input1_limit.q.out
===================================================================
--- ql/src/test/results/clientpositive/input1_limit.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input1_limit.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key < 100 LIMIT 5
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)) (TOK_LIMIT 10)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)) (TOK_LIMIT 5)))
@@ -86,7 +92,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/271740607/7826613.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1393171375/384835321.10004
Reduce Output Operator
sort order:
tag: -1
@@ -114,6 +120,15 @@
name: dest2
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key < 100 LIMIT 5
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/516449067/362942478.10000
86 val_86
27 val_27
98 val_98
@@ -124,8 +139,13 @@
17 val_17
0 val_0
57 val_57
+query: SELECT dest2.* FROM dest2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/202699438/194358324.10000
86 val_86
27 val_27
98 val_98
66 val_66
37 val_37
+query: DROP TABLE dest1
+query: DROP TABLE dest2
Index: ql/src/test/results/clientpositive/groupby8.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby8.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby8.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTIONDI COUNT (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTIONDI COUNT (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))))
@@ -53,7 +59,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/68281019/273097358.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/560077282/216523586.10004
Reduce Output Operator
key expressions:
expr: 0
@@ -114,7 +120,7 @@
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/68281019/273097358.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/560077282/216523586.10005
Reduce Output Operator
key expressions:
expr: key
@@ -147,7 +153,7 @@
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/68281019/273097358.10004
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/560077282/216523586.10006
Reduce Output Operator
key expressions:
expr: 0
@@ -190,6 +196,15 @@
name: dest2
+query: FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+query: SELECT DEST1.* FROM DEST1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/295910206/467633574.10000
0 1
10 1
100 1
@@ -499,6 +514,9 @@
96 1
97 1
98 1
+query: SELECT DEST2.* FROM DEST2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/126981477/158731393.10000
0 1
10 1
100 1
Index: ql/src/test/results/clientpositive/union7.q.out
===================================================================
--- ql/src/test/results/clientpositive/union7.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union7.q.out (working copy)
@@ -1,3 +1,7 @@
+query: explain
+ select unionsrc.key, count(1) FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
+ UNION ALL
+ select s2.key as key, s2.value as value from src1 s2) unionsrc group by unionsrc.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst1' key) (TOK_SELEXPR (TOK_FUNCTION TOK_STRING (TOK_FUNCTION count 1)) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src1 s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) value) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL unionsrc) key))))
@@ -46,7 +50,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/491802115/687421976.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/255905982/946698378.10002
Union
Group By Operator
aggregations:
@@ -67,7 +71,7 @@
value expressions:
expr: 1
type: bigint
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/491802115/687421976.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/255905982/946698378.10003
Union
Group By Operator
aggregations:
@@ -132,6 +136,12 @@
limit: -1
+query: select unionsrc.key, count(1) FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
+ UNION ALL
+ select s2.key as key, s2.value as value from src1 s2) unionsrc group by unionsrc.key
+Input: default/src
+Input: default/src1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/584323767/101489285.10000
10
128 1
146 1
Index: ql/src/test/results/clientpositive/input3_limit.q.out
===================================================================
--- ql/src/test/results/clientpositive/input3_limit.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input3_limit.q.out (working copy)
@@ -1,3 +1,11 @@
+query: DROP TABLE T1
+query: CREATE TABLE T1(key STRING, value STRING) STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
+query: LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T1
+query: DROP TABLE T2
+query: CREATE TABLE T2(key STRING, value STRING)
+query: EXPLAIN
+INSERT OVERWRITE TABLE T2 SELECT * FROM (SELECT * FROM T1 DISTRIBUTE BY key SORT BY key, value) T LIMIT 20
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF T1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) T)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB T2)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 20)))
@@ -53,7 +61,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/349137289/427658357.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/11453942/21576323.10002
Reduce Output Operator
sort order:
tag: -1
@@ -85,6 +93,12 @@
name: t2
+query: INSERT OVERWRITE TABLE T2 SELECT * FROM (SELECT * FROM T1 DISTRIBUTE BY key SORT BY key, value) T LIMIT 20
+Input: default/t1
+Output: default/t2
+query: SELECT * FROM T2 SORT BY key, value
+Input: default/t2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/177136935/1888963.10000
0 val_0
0 val_0
0 val_0
@@ -105,3 +119,5 @@
104 val_104
104 val_105
104 val_105
+query: DROP TABLE T1
+query: DROP TABLE T2
Index: ql/src/test/results/clientpositive/create_1.q.out
===================================================================
--- ql/src/test/results/clientpositive/create_1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/create_1.q.out (working copy)
@@ -1,18 +1,36 @@
+query: DROP TABLE table1
+query: DROP TABLE table2
+query: DROP TABLE table3
+query: CREATE TABLE table1 (a STRING, b STRING) STORED AS TEXTFILE
+query: DESCRIBE table1
a string
b string
+query: DESCRIBE EXTENDED table1
a string
b string
-Detailed Table Information Table(tableName:table1,dbName:default,owner:rmurthy,createTime:1238029987,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:string,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/table1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+Detailed Table Information Table(tableName:table1,dbName:default,owner:athusoo,createTime:1241277708,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:string,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/table1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+query: CREATE TABLE IF NOT EXISTS table1 (a STRING, b STRING) STORED AS TEXTFILE
+query: CREATE TABLE IF NOT EXISTS table2 (a STRING, b INT) STORED AS TEXTFILE
+query: DESCRIBE table2
a string
b int
+query: DESCRIBE EXTENDED table2
a string
b int
-Detailed Table Information Table(tableName:table2,dbName:default,owner:rmurthy,createTime:1238029987,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/table2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+Detailed Table Information Table(tableName:table2,dbName:default,owner:athusoo,createTime:1241277708,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/table2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+query: CREATE TABLE table3 (a STRING, b STRING)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
+STORED AS TEXTFILE
+query: DESCRIBE table3
a string
b string
+query: DESCRIBE EXTENDED table3
a string
b string
-Detailed Table Information Table(tableName:table3,dbName:default,owner:rmurthy,createTime:1238029987,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:string,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/table3,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=9,field.delim= }),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+Detailed Table Information Table(tableName:table3,dbName:default,owner:athusoo,createTime:1241277708,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:string,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/table3,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=9,field.delim= }),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+query: DROP TABLE table1
+query: DROP TABLE table2
+query: DROP TABLE table3
Index: ql/src/test/results/clientpositive/case_sensitivity.q.out
===================================================================
--- ql/src/test/results/clientpositive/case_sensitivity.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/case_sensitivity.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE DEST1(Key INT, VALUE STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM SRC_THRIFT
+INSERT OVERWRITE TABLE dest1 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC_THRIFT)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR ([ (. (TOK_TABLE_OR_COL src_Thrift) LINT) 1)) (TOK_SELEXPR (. ([ (. (TOK_TABLE_OR_COL src_thrift) lintstring) 0) MYSTRING))) (TOK_WHERE (> ([ (. (TOK_TABLE_OR_COL src_thrift) liNT) 0) 0))))
@@ -46,6 +50,13 @@
name: dest1
+query: FROM SRC_THRIFT
+INSERT OVERWRITE TABLE dest1 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0
+Input: default/src_thrift
+Output: default/dest1
+query: SELECT DEST1.* FROM Dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/256252840/400813971.10000
2 1
4 8
6 27
Index: ql/src/test/results/clientpositive/scriptfile1.q.out
===================================================================
--- ql/src/test/results/clientpositive/scriptfile1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/scriptfile1.q.out (working copy)
@@ -1,3 +1,16 @@
+query: CREATE TABLE dest1(key INT, value STRING)
+query: FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value)
+ USING 'testgrep' AS (tkey, tvalue)
+ CLUSTER BY tkey
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/117197470/1920653298.10000
10 val_10
100 val_100
100 val_100
Index: ql/src/test/results/clientpositive/mapreduce2.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapreduce2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/mapreduce2.q.out (working copy)
@@ -1,3 +1,10 @@
+query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+DISTRIBUTE BY tvalue, tkey
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey))))
@@ -74,6 +81,16 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+DISTRIBUTE BY tvalue, tkey
+Input: default/src
+Output: default/dest1
+query: SELECT * FROM (SELECT dest1.* FROM dest1 DISTRIBUTE BY key SORT BY key, ten, one, value) T
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/21517063/428269906.10000
0 0 0 val_0
0 0 0 val_0
0 0 0 val_0
Index: ql/src/test/results/clientpositive/nullgroup.q.out
===================================================================
--- ql/src/test/results/clientpositive/nullgroup.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/nullgroup.q.out (working copy)
@@ -1,3 +1,5 @@
+query: explain
+select count(1) from src x where x.key > 9999
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL x) key) 9999))))
@@ -49,7 +51,12 @@
limit: -1
+query: select count(1) from src x where x.key > 9999
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/207777648/268938083.10000
0
+query: explain
+select count(1) from src x where x.key > 9999
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL x) key) 9999))))
@@ -101,7 +108,12 @@
limit: -1
+query: select count(1) from src x where x.key > 9999
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/7619133/38786.10000
0
+query: explain
+select count(1) from src x where x.key > 9999
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL x) key) 9999))))
@@ -148,7 +160,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/135887310/743300630.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/358258135/421877482.10002
Reduce Output Operator
sort order:
tag: -1
@@ -176,7 +188,12 @@
limit: -1
+query: select count(1) from src x where x.key > 9999
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/23717650/593465976.10000
0
+query: explain
+select count(1) from src x where x.key > 9999
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL x) key) 9999))))
@@ -224,4 +241,7 @@
limit: -1
+query: select count(1) from src x where x.key > 9999
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/297371777/298775966.10000
0
Index: ql/src/test/results/clientpositive/alter2.q.out
===================================================================
--- ql/src/test/results/clientpositive/alter2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/alter2.q.out (working copy)
@@ -1,3 +1,6 @@
+query: drop table alter2
+query: create table alter2(a int, b int) partitioned by (insertdate string)
+query: describe extended alter2
a int
b int
insertdate string
@@ -2,33 +5,51 @@
-Detailed Table Information Table(tableName:alter2,dbName:default,owner:rmurthy,createTime:1238029932,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})
+Detailed Table Information Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})
+query: show partitions alter2
+query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
+query: describe extended alter2
a int
b int
insertdate string
-Detailed Table Information Table(tableName:alter2,dbName:default,owner:rmurthy,createTime:1238029932,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})
+Detailed Table Information Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})
+query: show partitions alter2
insertdate=2008-01-01
+query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
+query: describe extended alter2
a int
b int
insertdate string
-Detailed Table Information Table(tableName:alter2,dbName:default,owner:rmurthy,createTime:1238029932,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})
+Detailed Table Information Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})
+query: show partitions alter2
insertdate=2008-01-01
insertdate=2008-01-02
+query: drop table alter2
+query: create external table alter2(a int, b int) partitioned by (insertdate string)
+query: describe extended alter2
a int
b int
insertdate string
-Detailed Table Information Table(tableName:alter2,dbName:default,owner:rmurthy,createTime:1238029932,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})
+Detailed Table Information Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})
+query: show partitions alter2
+query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
+query: describe extended alter2
a int
b int
insertdate string
-Detailed Table Information Table(tableName:alter2,dbName:default,owner:rmurthy,createTime:1238029932,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})
+Detailed Table Information Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})
+query: show partitions alter2
insertdate=2008-01-01
+query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
+query: describe extended alter2
a int
b int
insertdate string
-Detailed Table Information Table(tableName:alter2,dbName:default,owner:rmurthy,createTime:1238029932,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})
+Detailed Table Information Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})
+query: show partitions alter2
insertdate=2008-01-01
insertdate=2008-01-02
+query: drop table alter2
Index: ql/src/test/results/clientpositive/nullinput.q.out
===================================================================
--- ql/src/test/results/clientpositive/nullinput.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/nullinput.q.out (working copy)
@@ -0,0 +1,8 @@
+query: create table tstnullinut(a string, b string)
+query: select x.* from tstnullinut x
+Input: default/tstnullinut
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/252572061/456825886.10000
+query: select x.a, count(1) from tstnullinut x group by x.a
+Input: default/tstnullinut
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/584851051/141046837.10000
+query: drop table tstnullinut
Index: ql/src/test/results/clientpositive/mapreduce6.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapreduce6.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/mapreduce6.q.out (working copy)
@@ -1,3 +1,10 @@
+query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+SELECT src.key, CAST(src.key / 10 AS INT) as c2, CAST(src.key % 10 AS INT) as c3, src.value
+DISTRIBUTE BY value, key
+SORT BY c2 DESC, c3 ASC
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) c2) (TOK_SELEXPR (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL value) (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEDESC (TOK_TABLE_OR_COL c2)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL c3)))))
@@ -74,6 +81,16 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1
+SELECT src.key, CAST(src.key / 10 AS INT) as c2, CAST(src.key % 10 AS INT) as c3, src.value
+DISTRIBUTE BY value, key
+SORT BY c2 DESC, c3 ASC
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/589725073/176567235.10000
490 49 0 val_490
491 49 1 val_491
492 49 2 val_492
Index: ql/src/test/results/clientpositive/groupby1_noskew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby1_noskew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby1_noskew.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE dest_g1(key INT, value DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest_g1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest_g1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) key))))
@@ -62,6 +65,12 @@
name: dest_g1
+query: FROM src INSERT OVERWRITE TABLE dest_g1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
+Input: default/src
+Output: default/dest_g1
+query: SELECT dest_g1.* FROM dest_g1
+Input: default/dest_g1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/36429349/823891391.10000
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/groupby2_map_skew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby2_map_skew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby2_map_skew.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))))) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))))
@@ -59,7 +63,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/339920934/34511760.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/580419027/35834303.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -119,6 +123,13 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/541951294/114930476.10000
0 1 00.0
1 71 116414.0
2 69 225571.0
Index: ql/src/test/results/clientpositive/join20.q.out
===================================================================
--- ql/src/test/results/clientpositive/join20.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join20.q.out (working copy)
@@ -1,3 +1,6 @@
+query: EXPLAIN
+SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10) RIGHT OUTER JOIN src src3 ON (src1.key = src3.key AND src3.key < 20)
+SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_JOIN (TOK_TABREF src src1) (TOK_TABREF src src2) (AND (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)) (< (. (TOK_TABLE_OR_COL src1) key) 10))) (TOK_TABREF src src3) (AND (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src3) key)) (< (. (TOK_TABLE_OR_COL src3) key) 20)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src1) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src1) value)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src2) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src2) value)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src3) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src3) value)))))
@@ -97,7 +100,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/athusoo/commits/hive_trunk_ws6/build/ql/tmp/1362647938/337231248.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/630857851/137120091.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -141,6 +144,10 @@
limit: -1
+query: SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10) RIGHT OUTER JOIN src src3 ON (src1.key = src3.key AND src3.key < 20)
+SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/536132976/391377613.10000
NULL NULL NULL NULL 10 val_10
NULL NULL NULL NULL 11 val_11
NULL NULL NULL NULL 12 val_12
@@ -209,6 +216,9 @@
5 val_5 5 val_5 5 val_5
8 val_8 8 val_8 8 val_8
9 val_9 9 val_9 9 val_9
+query: EXPLAIN
+SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10 AND src2.key < 15) RIGHT OUTER JOIN src src3 ON (src1.key = src3.key AND src3.key < 20)
+SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_JOIN (TOK_TABREF src src1) (TOK_TABREF src src2) (AND (AND (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)) (< (. (TOK_TABLE_OR_COL src1) key) 10)) (< (. (TOK_TABLE_OR_COL src2) key) 15))) (TOK_TABREF src src3) (AND (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src3) key)) (< (. (TOK_TABLE_OR_COL src3) key) 20)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src1) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src1) value)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src2) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src2) value)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src3) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src3) value)))))
@@ -312,7 +322,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/athusoo/commits/hive_trunk_ws6/build/ql/tmp/59697994/118049192.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/328986890/804935253.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -356,6 +366,10 @@
limit: -1
+query: SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10 AND src2.key < 15) RIGHT OUTER JOIN src src3 ON (src1.key = src3.key AND src3.key < 20)
+SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1108426053/828542659.10000
NULL NULL NULL NULL 10 val_10
NULL NULL NULL NULL 11 val_11
NULL NULL NULL NULL 12 val_12
Index: ql/src/test/results/clientpositive/ppd_outer_join1.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_outer_join1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_outer_join1.q.out (working copy)
@@ -1,3 +1,11 @@
+query: EXPLAIN
+ FROM
+ src a
+ LEFT OUTER JOIN
+ src b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF src a) (TOK_TABREF src b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. (TOK_TABLE_OR_COL b) key) 25)))))
@@ -76,6 +84,15 @@
limit: -1
+query: FROM
+ src a
+ LEFT OUTER JOIN
+ src b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/966386723/732369971.10000
17 val_17 17 val_17
18 val_18 18 val_18
18 val_18 18 val_18
Index: ql/src/test/results/clientpositive/input21.q.out
===================================================================
--- ql/src/test/results/clientpositive/input21.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input21.q.out (working copy)
@@ -1,3 +1,7 @@
+query: DROP TABLE src_null
+query: CREATE TABLE src_null(a STRING, b STRING, c STRING, d STRING) STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/null.txt' INTO TABLE src_null
+query: EXPLAIN SELECT * FROM src_null DISTRIBUTE BY c SORT BY d
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src_null)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL c)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL d)))))
@@ -52,6 +56,9 @@
limit: -1
+query: SELECT * FROM src_null DISTRIBUTE BY c SORT BY d
+Input: default/src_null
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/435874957/1843130221.10000
1.0 1 same 0
1.0 1 same 1
1.0 1 same 2
@@ -62,3 +69,4 @@
1.0 NULL same 7
1.0 1 same 8
1.0 1 same 9
+query: DROP TABLE src_null
Index: ql/src/test/results/clientpositive/sample3.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample3.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/sample3.q.out (working copy)
@@ -1,3 +1,6 @@
+query: EXPLAIN
+SELECT s.key
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 5 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s) key)))))
@@ -34,6 +37,10 @@
limit: -1
+query: SELECT s.key
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s SORT BY key
+Input: default/srcbucket
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/785589213/505578835.10000
100
100
100
Index: ql/src/test/results/clientpositive/udf_unix_timestamp.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_unix_timestamp.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf_unix_timestamp.q.out (working copy)
@@ -1,4 +1,28 @@
+query: SELECT
+ '2009-03-20 11:30:01',
+ unix_timestamp('2009-03-20 11:30:01')
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1312227505/619820167.10000
2009-03-20 11:30:01 1237573801
+query: SELECT
+ '2009-03-20',
+ unix_timestamp('2009-03-20', 'yyyy-MM-dd')
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/24449050/574490019.10000
2009-03-20 1237532400
+query: SELECT
+ '2009 Mar 20 11:30:01 am',
+ unix_timestamp('2009 Mar 20 11:30:01 am', 'yyyy MMM dd h:mm:ss a')
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/113858045/511496819.10000
2009 Mar 20 11:30:01 am 1237573801
+query: SELECT
+ 'random_string',
+ unix_timestamp('random_string')
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/7890380/263814004.10000
random_string NULL
Index: ql/src/test/results/clientpositive/inputddl2.q.out
===================================================================
--- ql/src/test/results/clientpositive/inputddl2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/inputddl2.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds DATETIME, country STRING) STORED AS TEXTFILE
ABSTRACT SYNTAX TREE:
(TOK_CREATETABLE INPUTDDL2 (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEPARTCOLS (TOK_TABCOLLIST (TOK_TABCOL ds TOK_DATETIME) (TOK_TABCOL country TOK_STRING))) TOK_TBLTEXTFILE)
@@ -18,7 +20,10 @@
isExternal: false
+query: CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds DATETIME, country STRING) STORED AS TEXTFILE
+query: DESCRIBE INPUTDDL2
key int
value string
ds datetime
country string
+query: DROP TABLE INPUTDDL2
Index: ql/src/test/results/clientpositive/groupby3_map.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby3_map.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby3_map.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT sum(substr(src.value,5)), avg(substr(src.value,5)), avg(DISTINCT substr(src.value,5)), max(substr(src.value,5)), min(substr(src.value,5))
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION avg (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTIONDI avg (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION max (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION min (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))))))
@@ -95,4 +99,11 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT sum(substr(src.value,5)), avg(substr(src.value,5)), avg(DISTINCT substr(src.value,5)), max(substr(src.value,5)), min(substr(src.value,5))
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/97430135/101657566.10000
130091.0 260.182 256.10355987055016 98.0 0.0
Index: ql/src/test/results/clientpositive/groupby2_limit.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby2_limit.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby2_limit.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT src.key, sum(substr(src.value,5)) FROM src GROUP BY src.key LIMIT 5
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) key)) (TOK_LIMIT 5)))
@@ -56,6 +58,9 @@
limit: 5
+query: SELECT src.key, sum(substr(src.value,5)) FROM src GROUP BY src.key LIMIT 5
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/971888258/854485877.10000
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/sample7.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample7.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/sample7.q.out (working copy)
@@ -1,3 +1,8 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 4 on key) s
+WHERE s.key > 100
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 4 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL s) key) 100))))
@@ -33,7 +38,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/128254877/899999938.10000.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/92876441/338195813.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -47,14 +52,14 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
Needs Tagging: false
Path -> Alias:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcbucket/kv1.txt
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket/kv1.txt
Path -> Partition:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcbucket/kv1.txt
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket/kv1.txt
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -70,7 +75,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcbucket
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket
@@ -78,7 +83,7 @@
Move Operator
tables:
replace: true
- source: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/128254877/899999938.10000.insclause-0
+ source: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/92876441/338195813.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -92,11 +97,20 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
+ tmp directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/92876441/338195813.10001
+query: INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 4 on key) s
+WHERE s.key > 100
+Input: default/srcbucket
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/491534417/37965249.10000
165 val_165
484 val_484
150 val_150
Index: ql/src/test/results/clientpositive/inputddl6.q.out
===================================================================
--- ql/src/test/results/clientpositive/inputddl6.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/inputddl6.q.out (working copy)
@@ -1,3 +1,8 @@
+query: DROP TABLE INPUTDDL6
+query: CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME) STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-09')
+query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-08')
+query: DESCRIBE EXTENDED INPUTDDL6
key string
value string
ds datetime
@@ -2,11 +7,18 @@
-Detailed Table Information Table(tableName:inputddl6,dbName:default,owner:rmurthy,createTime:1238030310,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:string,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/inputddl6,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null)],parameters:{})
+Detailed Table Information Table(tableName:inputddl6,dbName:default,owner:athusoo,createTime:1241278353,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:string,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/inputddl6,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null)],parameters:{})
+query: DESCRIBE EXTENDED INPUTDDL6 PARTITION (ds='2008-04-08')
key string
value string
ds datetime
-Detailed Partition Information Partition(values:[2008-04-08],dbName:default,tableName:inputddl6,createTime:0,lastAccessTime:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:string,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/inputddl6/ds=2008-04-08,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),parameters:{})
+Detailed Partition Information Partition(values:[2008-04-08],dbName:default,tableName:inputddl6,createTime:0,lastAccessTime:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:string,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/inputddl6/ds=2008-04-08,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),parameters:{})
+query: SHOW PARTITIONS INPUTDDL6
ds=2008-04-08
ds=2008-04-09
+query: ALTER TABLE INPUTDDL6 DROP PARTITION (ds='2008-04-08')
+query: SHOW PARTITIONS INPUTDDL6
ds=2008-04-09
+query: DROP TABLE INPUTDDL6
+query: EXPLAIN
+DESCRIBE EXTENDED INPUTDDL6 PARTITION (ds='2008-04-09')
ABSTRACT SYNTAX TREE:
Index: ql/src/test/results/clientpositive/groupby7_map.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby7_map.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby7_map.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))))
@@ -93,7 +99,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/1175788265/848031664.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/148712980/235409749.10004
Reduce Output Operator
key expressions:
expr: 0
@@ -136,6 +142,15 @@
name: dest2
+query: FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+query: SELECT DEST1.* FROM DEST1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/46974594/51174778.10000
0 0.0
10 10.0
100 200.0
@@ -445,6 +460,9 @@
96 96.0
97 194.0
98 196.0
+query: SELECT DEST2.* FROM DEST2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/294954367/121849485.10000
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/ppd_udf_case.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_udf_case.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_udf_case.q.out (working copy)
@@ -1,3 +1,15 @@
+query: EXPLAIN
+SELECT *
+FROM srcpart a JOIN srcpart b
+ON a.key = b.key
+WHERE a.ds = '2008-04-08' AND
+ b.ds = '2008-04-08' AND
+ CASE a.key
+ WHEN '27' THEN TRUE
+ WHEN '38' THEN FALSE
+ ELSE NULL
+ END
+ORDER BY a.key, a.value, a.ds, a.hr, b.key, b.value, b.ds, b.hr
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF srcpart a) (TOK_TABREF srcpart b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (AND (AND (= (. (TOK_TABLE_OR_COL a) ds) '2008-04-08') (= (. (TOK_TABLE_OR_COL b) ds) '2008-04-08')) (TOK_FUNCTION CASE (. (TOK_TABLE_OR_COL a) key) '27' TRUE '38' FALSE TOK_NULL))) (TOK_ORDERBY (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL a) value) (. (TOK_TABLE_OR_COL a) ds) (. (TOK_TABLE_OR_COL a) hr) (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL b) value) (. (TOK_TABLE_OR_COL b) ds) (. (TOK_TABLE_OR_COL b) hr))))
@@ -96,7 +108,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/405-trunk-apache-hive/build/ql/tmp/454747503/14473320.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/118249382/1018524322.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -148,6 +160,20 @@
limit: -1
+query: SELECT *
+FROM srcpart a JOIN srcpart b
+ON a.key = b.key
+WHERE a.ds = '2008-04-08' AND
+ b.ds = '2008-04-08' AND
+ CASE a.key
+ WHEN '27' THEN TRUE
+ WHEN '38' THEN FALSE
+ ELSE NULL
+ END
+ORDER BY a.key, a.value, a.ds, a.hr, b.key, b.value, b.ds, b.hr
+Input: default/srcpart/ds=2008-04-08/hr=11
+Input: default/srcpart/ds=2008-04-08/hr=12
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/160403134/1058941051.10000
27 val_27 2008-04-08 11 27 val_27 2008-04-08 11
27 val_27 2008-04-08 11 27 val_27 2008-04-08 12
27 val_27 2008-04-08 12 27 val_27 2008-04-08 11
Index: ql/src/test/results/clientpositive/join0.q.out
===================================================================
--- ql/src/test/results/clientpositive/join0.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join0.q.out (working copy)
@@ -1,3 +1,10 @@
+query: EXPLAIN
+SELECT src1.key as k1, src1.value as v1,
+ src2.key as k2, src2.value as v2 FROM
+ (SELECT * FROM src WHERE src.key < 10) src1
+ JOIN
+ (SELECT * FROM src WHERE src.key < 10) src2
+ SORT BY k1, v1, k2, v2
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 10)))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 10)))) src2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) k1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) v1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) k2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) v2)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL k1)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL v1)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL k2)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL v2)))))
@@ -76,7 +83,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/1128622389/1025561511.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/819564061/100641400.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -112,6 +119,14 @@
limit: -1
+query: SELECT src1.key as k1, src1.value as v1,
+ src2.key as k2, src2.value as v2 FROM
+ (SELECT * FROM src WHERE src.key < 10) src1
+ JOIN
+ (SELECT * FROM src WHERE src.key < 10) src2
+ SORT BY k1, v1, k2, v2
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/111359419/362565466.10000
0 val_0 0 val_0
0 val_0 0 val_0
0 val_0 0 val_0
Index: ql/src/test/results/clientpositive/fileformat_sequencefile.q.out
===================================================================
--- ql/src/test/results/clientpositive/fileformat_sequencefile.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/fileformat_sequencefile.q.out (working copy)
@@ -1,3 +1,7 @@
+query: EXPLAIN
+CREATE TABLE dest1(key INT, value STRING) STORED AS
+ INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
+ OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat'
ABSTRACT SYNTAX TREE:
(TOK_CREATETABLE dest1 (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEFILEFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat' 'org.apache.hadoop.mapred.SequenceFileOutputFormat'))
@@ -17,10 +21,21 @@
isExternal: false
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS
+ INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
+ OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat'
+query: DESCRIBE EXTENDED dest1
key int
value string
-Detailed Table Information Table(tableName:dest1,dbName:default,owner:rmurthy,createTime:1238029989,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:int,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/dest1,inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat,outputFormat:org.apache.hadoop.mapred.SequenceFileOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+Detailed Table Information Table(tableName:dest1,dbName:default,owner:athusoo,createTime:1241277724,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:int,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1,inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 10
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/821599952/1776126674.10000
0 val_0
4 val_4
8 val_8
@@ -31,3 +46,4 @@
2 val_2
5 val_5
9 val_9
+query: DROP TABLE dest1
Index: ql/src/test/results/clientpositive/input1.q.out
===================================================================
--- ql/src/test/results/clientpositive/input1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input1.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE TEST1(A INT, B DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+DESCRIBE TEST1
ABSTRACT SYNTAX TREE:
(TOK_DESCTABLE (TOK_TABTYPE TEST1))
@@ -16,5 +19,7 @@
limit: -1
+query: DESCRIBE TEST1
a int
b double
+query: DROP TABLE TEST1
Index: ql/src/test/results/clientpositive/udf_substr.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_substr.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf_substr.q.out (working copy)
@@ -1,5 +1,48 @@
+query: SELECT
+ substr(null, 1), substr(null, 1, 1),
+ substr('ABC', null), substr('ABC', null, 1),
+ substr('ABC', 1, null)
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/273310899/179583241.10000
NULL NULL NULL NULL NULL
+query: SELECT
+ substr('ABC', 1, 0), substr('ABC', 1, -1), substr('ABC', 2, -100),
+ substr('ABC', 4), substr('ABC', 4, 100),
+ substr('ABC', -4), substr('ABC', -4, 100),
+ substr('ABC', 100), substr('ABC', 100, 100),
+ substr('ABC', -100), substr('ABC', -100, 100),
+ substr('ABC', 2147483647), substr('ABC', 2147483647, 2147483647)
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1187050376/77350002.10000
+query: SELECT
+ substr('ABCDEFG', 3, 4), substr('ABCDEFG', -5, 4),
+ substr('ABCDEFG', 3), substr('ABCDEFG', -5),
+ substr('ABC', 0), substr('ABC', 1), substr('ABC', 2), substr('ABC', 3),
+ substr('ABC', 1, 2147483647), substr('ABC', 2, 2147483647),
+ substr('A', 0), substr('A', 1), substr('A', -1)
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/663733434/1218000666.10000
CDEF CDEF CDEFG CDEFG ABC ABC BC C ABC BC A A A
+query: SELECT
+ substr('ABC', 0, 1), substr('ABC', 0, 2), substr('ABC', 0, 3), substr('ABC', 0, 4),
+ substr('ABC', 1, 1), substr('ABC', 1, 2), substr('ABC', 1, 3), substr('ABC', 1, 4),
+ substr('ABC', 2, 1), substr('ABC', 2, 2), substr('ABC', 2, 3), substr('ABC', 2, 4),
+ substr('ABC', 3, 1), substr('ABC', 3, 2), substr('ABC', 3, 3), substr('ABC', 3, 4),
+ substr('ABC', 4, 1)
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/573014667/1237268671.10000
A AB ABC ABC A AB ABC ABC B BC BC BC C C C C
+query: SELECT
+ substr('ABC', -1, 1), substr('ABC', -1, 2), substr('ABC', -1, 3), substr('ABC', -1, 4),
+ substr('ABC', -2, 1), substr('ABC', -2, 2), substr('ABC', -2, 3), substr('ABC', -2, 4),
+ substr('ABC', -3, 1), substr('ABC', -3, 2), substr('ABC', -3, 3), substr('ABC', -3, 4),
+ substr('ABC', -4, 1)
+FROM src LIMIT 1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/265540324/810313953.10000
C C C C B BC BC BC A AB ABC ABC
Index: ql/src/test/results/clientpositive/cluster.q.out
===================================================================
--- ql/src/test/results/clientpositive/cluster.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/cluster.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 10)) (TOK_CLUSTERBY (. (TOK_TABLE_OR_COL x) key))))
@@ -48,7 +50,12 @@
limit: -1
+query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/415464870/184820352.10000
10 val_10
+query: EXPLAIN
+SELECT * FROM SRC x where x.key = 20 CLUSTER BY key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key))))
@@ -99,7 +106,12 @@
limit: -1
+query: SELECT * FROM SRC x where x.key = 20 CLUSTER BY key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/483996565/25535146.10000
20 val_20
+query: EXPLAIN
+SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key))))
@@ -150,7 +162,12 @@
limit: -1
+query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/67500406/610168339.10000
20 val_20
+query: EXPLAIN
+SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY x.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (. (TOK_TABLE_OR_COL x) key))))
@@ -201,7 +218,12 @@
limit: -1
+query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY x.key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1062209591/78302640.10000
20 val_20
+query: EXPLAIN
+SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key))))
@@ -252,7 +274,12 @@
limit: -1
+query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/14719539/136351528.10000
20 val_20
+query: EXPLAIN
+SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (. (TOK_TABLE_OR_COL x) key))))
@@ -303,7 +330,12 @@
limit: -1
+query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/168361199/328930285.10000
20 val_20
+query: EXPLAIN
+SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY v1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL v1))))
@@ -354,7 +386,12 @@
limit: -1
+query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY v1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/218669889/821061905.10000
20 val_20
+query: EXPLAIN
+SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_CLUSTERBY (. (TOK_TABLE_OR_COL x) key)))) y)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF y))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL y) key) 20))))
@@ -411,7 +448,12 @@
limit: -1
+query: SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1411175272/114925452.10000
20 val_20
+query: EXPLAIN
+SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF SRC x) (TOK_TABREF SRC y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL y) key))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL v1))))
@@ -486,7 +528,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/292631350/1030615728.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/213518946/254574448.10002
Reduce Output Operator
key expressions:
expr: 1
@@ -517,7 +559,12 @@
limit: -1
+query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/115523540/275557180.10000
20 val_20 20
+query: EXPLAIN
+SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF SRC x) (TOK_TABREF SRC y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1) (TOK_SELEXPR (TOK_ALLCOLREF y))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL v1))))
@@ -592,7 +639,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/7962532/269782203.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1145795613/25764450.10002
Reduce Output Operator
key expressions:
expr: 1
@@ -625,7 +672,12 @@
limit: -1
+query: SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1597231071/54379941.10000
20 val_20 20 val_20
+query: EXPLAIN
+SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF SRC x) (TOK_TABREF SRC y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1) (TOK_SELEXPR (TOK_ALLCOLREF y))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (. (TOK_TABLE_OR_COL x) key))))
@@ -700,7 +752,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/164917925/7060433.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/186086163/372716927.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -733,7 +785,12 @@
limit: -1
+query: SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/895988628/1557073804.10000
20 val_20 20 val_20
+query: EXPLAIN
+SELECT x.key, x.value as v1, y.key as yk FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF SRC x) (TOK_TABREF SRC y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL y) key) yk)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key))))
@@ -808,7 +865,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/813257/314231170.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/173489852/1494704235.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -839,7 +896,18 @@
limit: -1
+query: SELECT x.key, x.value as v1, y.key as yk FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/144828791/434522525.10000
20 val_20 20
+query: EXPLAIN
+SELECT unioninput.*
+FROM (
+ FROM src select src.key, src.value WHERE src.key < 100
+ UNION ALL
+ FROM src SELECT src.* WHERE src.key > 100
+) unioninput
+CLUSTER BY unioninput.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) 100))))) unioninput)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF unioninput))) (TOK_CLUSTERBY (. (TOK_TABLE_OR_COL unioninput) key))))
@@ -929,6 +997,15 @@
limit: -1
+query: SELECT unioninput.*
+FROM (
+ FROM src select src.key, src.value WHERE src.key < 100
+ UNION ALL
+ FROM src SELECT src.* WHERE src.key > 100
+) unioninput
+CLUSTER BY unioninput.key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/247260620/164962735.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/input_lazyserde.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_lazyserde.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_lazyserde.q.out (working copy)
@@ -1,3 +1,13 @@
+query: CREATE TABLE dest1(a array<int>, b array<string>, c map<string,string>, d int, e string)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY '1'
+COLLECTION ITEMS TERMINATED BY '2'
+MAP KEYS TERMINATED BY '3'
+LINES TERMINATED BY '10'
+STORED AS TEXTFILE
+query: EXPLAIN
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) lint)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) lstring)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) mstringstring)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) aint)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) astring))) (TOK_DISTRIBUTEBY 1)))
@@ -73,6 +83,13 @@
name: dest1
+query: FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1
+Input: default/src_thrift
+Output: default/dest1
+query: SELECT dest1.* FROM dest1 DISTRIBUTE BY 1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1171779134/134105295.10000
[0,0,0] ["0","0","0"] {"key_0":"value_0"} 1712634731 record_0
[1,2,3] ["10","100","1000"] {"key_1":"value_1"} 465985200 record_1
[2,4,6] ["20","200","2000"] {"key_2":"value_2"} -751827638 record_2
@@ -84,6 +101,9 @@
[8,16,24] ["80","800","8000"] {"key_8":"value_8"} 1638581578 record_8
[9,18,27] ["90","900","9000"] {"key_9":"value_9"} 336964413 record_9
null null null 0 NULL
+query: SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1 DISTRIBUTE BY 1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/89505780/102961275.10000
0 0 NULL 1712634731 record_0
1 10 NULL 465985200 record_1
2 20 NULL -751827638 record_2
Index: ql/src/test/results/clientpositive/join4.q.out
===================================================================
--- ql/src/test/results/clientpositive/join4.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join4.q.out (working copy)
@@ -1,3 +1,18 @@
+query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ LEFT OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) c2)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) key) 10) (< (. (TOK_TABLE_OR_COL src1) key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) c4)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src2) key) 15) (< (. (TOK_TABLE_OR_COL src2) key) 25))))) b) (= (. (TOK_TABLE_OR_COL a) c1) (. (TOK_TABLE_OR_COL b) c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c1) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c2) c2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c3) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c3)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c4)))))
@@ -116,6 +131,24 @@
name: dest1
+query: FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ LEFT OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/897019709/831036263.10000
11 val_11 NULL NULL
12 val_12 NULL NULL
12 val_12 NULL NULL
Index: ql/src/test/results/clientpositive/input5.q.out
===================================================================
--- ql/src/test/results/clientpositive/input5.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input5.q.out (working copy)
@@ -1,3 +1,12 @@
+query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM src_thrift
+ SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring)
+ USING '/bin/cat' AS (tkey, tvalue)
+ CLUSTER BY tkey
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src_thrift) lint) (. (TOK_TABLE_OR_COL src_thrift) lintstring)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue)))))
@@ -69,6 +78,18 @@
name: dest1
+query: FROM (
+ FROM src_thrift
+ SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring)
+ USING '/bin/cat' AS (tkey, tvalue)
+ CLUSTER BY tkey
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
+Input: default/src_thrift
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/248520134/578564891.10000
[0,0,0] [{"myint":0,"mystring":"0"}]
[1,2,3] [{"myint":1,"mystring":"1"}]
[2,4,6] [{"myint":4,"mystring":"8"}]
Index: ql/src/test/results/clientpositive/regexp_extract.q.out
===================================================================
--- ql/src/test/results/clientpositive/regexp_extract.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/regexp_extract.q.out (working copy)
@@ -1,3 +1,11 @@
+query: EXPLAIN EXTENDED
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+ USING '/bin/cat'
+ CLUSTER BY key
+) tmap
+SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)',1) WHERE tmap.key < 100
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value) (+ 1 2) (+ 3 4)) '/bin/cat'))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) key)) (TOK_SELEXPR (TOK_FUNCTION regexp_extract (. (TOK_TABLE_OR_COL tmap) value) 'val_(\\d+\\t\\d+)' 1))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) key) 100))))
@@ -45,9 +53,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/zshao/tools/437-trunk-apache-hive/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
Path -> Partition:
- file:/data/users/zshao/tools/437-trunk-apache-hive/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -62,7 +70,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/437-trunk-apache-hive/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: src
Reduce Operator Tree:
@@ -80,7 +88,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: /data/users/zshao/tools/437-trunk-apache-hive/ql/../build/ql/tmp/223320410/1076677009.10001.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/289894559/857914755.10001.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -93,6 +101,15 @@
limit: -1
+query: FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+ USING '/bin/cat'
+ CLUSTER BY key
+) tmap
+SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)',1) WHERE tmap.key < 100
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/144466930/88930188.10000
0 0 3
0 0 3
0 0 3
@@ -177,6 +194,14 @@
97 97 3
98 98 3
98 98 3
+query: EXPLAIN EXTENDED
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+ USING '/bin/cat'
+ CLUSTER BY key
+) tmap
+SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)') WHERE tmap.key < 100
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value) (+ 1 2) (+ 3 4)) '/bin/cat'))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) key)) (TOK_SELEXPR (TOK_FUNCTION regexp_extract (. (TOK_TABLE_OR_COL tmap) value) 'val_(\\d+\\t\\d+)'))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) key) 100))))
@@ -224,9 +249,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/zshao/tools/437-trunk-apache-hive/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
Path -> Partition:
- file:/data/users/zshao/tools/437-trunk-apache-hive/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -241,7 +266,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/437-trunk-apache-hive/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: src
Reduce Operator Tree:
@@ -259,7 +284,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: /data/users/zshao/tools/437-trunk-apache-hive/ql/../build/ql/tmp/36446055/150992891.10001.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/20317412/1521268487.10001.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -272,6 +297,15 @@
limit: -1
+query: FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+ USING '/bin/cat'
+ CLUSTER BY key
+) tmap
+SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)') WHERE tmap.key < 100
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1101380073/31319980.10000
0 0 3
0 0 3
0 0 3
Index: ql/src/test/results/clientpositive/input_testxpath3.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_testxpath3.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_testxpath3.q.out (working copy)
@@ -1,3 +1,6 @@
+query: EXPLAIN
+FROM src_thrift
+SELECT src_thrift.mstringstring['key_9'], src_thrift.lintstring.myint
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR ([ (. (TOK_TABLE_OR_COL src_thrift) mstringstring) 'key_9')) (TOK_SELEXPR (. (. (TOK_TABLE_OR_COL src_thrift) lintstring) myint)))))
@@ -34,6 +37,10 @@
limit: -1
+query: FROM src_thrift
+SELECT src_thrift.mstringstring['key_9'], src_thrift.lintstring.myint
+Input: default/src_thrift
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/535177843/787258899.10000
NULL [0]
NULL [1]
NULL [4]
Index: ql/src/test/results/clientpositive/input_dynamicserde.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_dynamicserde.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_dynamicserde.q.out (working copy)
@@ -1,3 +1,13 @@
+query: CREATE TABLE dest1(a array<int>, b array<string>, c map<string,string>, d int, e string)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY '1'
+COLLECTION ITEMS TERMINATED BY '2'
+MAP KEYS TERMINATED BY '3'
+LINES TERMINATED BY '10'
+STORED AS TEXTFILE
+query: EXPLAIN
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) lint)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) lstring)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) mstringstring)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) aint)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) astring)))))
@@ -54,6 +64,13 @@
name: dest1
+query: FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring
+Input: default/src_thrift
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1346296798/44273868.10000
[0,0,0] ["0","0","0"] {"key_0":"value_0"} 1712634731 record_0
[1,2,3] ["10","100","1000"] {"key_1":"value_1"} 465985200 record_1
[2,4,6] ["20","200","2000"] {"key_2":"value_2"} -751827638 record_2
@@ -65,6 +82,9 @@
[8,16,24] ["80","800","8000"] {"key_8":"value_8"} 1638581578 record_8
[9,18,27] ["90","900","9000"] {"key_9":"value_9"} 336964413 record_9
null null null 0 NULL
+query: SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/373980247/133405459.10000
0 0 NULL 1712634731 record_0
1 10 NULL 465985200 record_1
2 20 NULL -751827638 record_2
Index: ql/src/test/results/clientpositive/join8.q.out
===================================================================
--- ql/src/test/results/clientpositive/join8.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join8.q.out (working copy)
@@ -1,3 +1,18 @@
+query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ LEFT OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4 where c.c3 IS NULL AND c.c1 IS NOT NULL
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) c2)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) key) 10) (< (. (TOK_TABLE_OR_COL src1) key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) c4)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src2) key) 15) (< (. (TOK_TABLE_OR_COL src2) key) 25))))) b) (= (. (TOK_TABLE_OR_COL a) c1) (. (TOK_TABLE_OR_COL b) c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c1) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c2) c2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c3) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c3)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c4))) (TOK_WHERE (AND (TOK_FUNCTION TOK_ISNULL (. (TOK_TABLE_OR_COL c) c3)) (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL c) c1))))))
@@ -120,6 +135,24 @@
name: dest1
+query: FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ LEFT OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4 where c.c3 IS NULL AND c.c1 IS NOT NULL
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/106779552/293157937.10000
11 val_11 NULL NULL
12 val_12 NULL NULL
12 val_12 NULL NULL
Index: ql/src/test/results/clientpositive/input_testsequencefile.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_testsequencefile.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_testsequencefile.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest4_sequencefile(key INT, value STRING) STORED AS SEQUENCEFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest4_sequencefile SELECT src.key, src.value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest4_sequencefile)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value)))))
@@ -42,6 +46,13 @@
name: dest4_sequencefile
+query: FROM src
+INSERT OVERWRITE TABLE dest4_sequencefile SELECT src.key, src.value
+Input: default/src
+Output: default/dest4_sequencefile
+query: SELECT dest4_sequencefile.* FROM dest4_sequencefile
+Input: default/dest4_sequencefile
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/225942313/58999367.10000
238 val_238
86 val_86
311 val_311
Index: ql/src/test/results/clientpositive/input9.q.out
===================================================================
--- ql/src/test/results/clientpositive/input9.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input9.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(value STRING, key INT) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key where NULL = NULL
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src1)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR TOK_NULL) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key))) (TOK_WHERE (= TOK_NULL TOK_NULL))))
@@ -50,3 +54,10 @@
name: dest1
+query: FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key where NULL = NULL
+Input: default/src1
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/170480320/543316931.10000
Index: ql/src/test/results/clientpositive/udf_case_column_pruning.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_case_column_pruning.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf_case_column_pruning.q.out (working copy)
@@ -1,3 +1,12 @@
+query: EXPLAIN
+SELECT CASE a.key
+ WHEN '1' THEN 2
+ WHEN '3' THEN 4
+ ELSE 5
+ END as key
+FROM src a JOIN src b
+ON a.key = b.key
+ORDER BY key LIMIT 10
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src a) (TOK_TABREF src b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION CASE (. (TOK_TABLE_OR_COL a) key) '1' 2 '3' 4 5) key)) (TOK_ORDERBY (TOK_TABLE_OR_COL key)) (TOK_LIMIT 10)))
@@ -66,7 +75,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/405-trunk-apache-hive/build/ql/tmp/247365874/372861284.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1482529671/5422982.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -91,6 +100,16 @@
limit: 10
+query: SELECT CASE a.key
+ WHEN '1' THEN 2
+ WHEN '3' THEN 4
+ ELSE 5
+ END as key
+FROM src a JOIN src b
+ON a.key = b.key
+ORDER BY key LIMIT 10
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/265668502/1080404523.10000
5
5
5
Index: ql/src/test/results/clientpositive/rand_partitionpruner1.q.out
===================================================================
--- ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (working copy)
@@ -1,3 +1,4 @@
+query: explain extended select * from src where rand(1) < 0.1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_FUNCTION rand 1) 0.1))))
@@ -23,7 +24,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: /Users/char/Documents/workspace/Hive/ql/../build/ql/tmp/88144648/77454670.10001.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1633457978/568179219.10001.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -32,9 +33,9 @@
serialization.format 1
Needs Tagging: false
Path -> Alias:
- file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
Path -> Partition:
- file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -49,7 +50,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: src
@@ -58,6 +59,9 @@
limit: -1
+query: select * from src where rand(1) < 0.1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1121250794/356279934.10000
409 val_409
429 val_429
209 val_209
Index: ql/src/test/results/clientpositive/union13.q.out
===================================================================
--- ql/src/test/results/clientpositive/union13.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union13.q.out (working copy)
@@ -1,3 +1,6 @@
+query: explain
+ select unionsrc.key, unionsrc.value FROM (select s1.key as key, s1.value as value from src s1 UNION ALL
+ select s2.key as key, s2.value as value from src s2) unionsrc
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s1) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s1) value) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) value) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) value)))))
@@ -55,6 +58,10 @@
limit: -1
+query: select unionsrc.key, unionsrc.value FROM (select s1.key as key, s1.value as value from src s1 UNION ALL
+ select s2.key as key, s2.value as value from src s2) unionsrc
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1992052322/24152217.10000
238 val_238
238 val_238
86 val_86
Index: ql/src/test/results/clientpositive/groupby1_map_skew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby1_map_skew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby1_map_skew.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) key))))
@@ -49,7 +52,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/76887750/357873195.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/571062527/279369936.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -102,6 +105,12 @@
name: dest1
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/94711228/451211575.10000
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/input.q.out
===================================================================
--- ql/src/test/results/clientpositive/input.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT x.* FROM SRC x
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x)))))
@@ -10,6 +12,9 @@
limit: -1
+query: SELECT x.* FROM SRC x
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/75849450/132865701.10000
238 val_238
86 val_86
311 val_311
Index: ql/src/test/results/clientpositive/join10.q.out
===================================================================
--- ql/src/test/results/clientpositive/join10.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join10.q.out (working copy)
@@ -1,3 +1,9 @@
+query: EXPLAIN FROM
+(SELECT src.* FROM src) x
+JOIN
+(SELECT src.* FROM src) Y
+ON (x.key = Y.key)
+SELECT Y.*
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))))) x) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))))) Y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL Y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF Y)))))
@@ -76,6 +82,14 @@
limit: -1
+query: FROM
+(SELECT src.* FROM src) x
+JOIN
+(SELECT src.* FROM src) Y
+ON (x.key = Y.key)
+SELECT Y.*
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/266202466/458884612.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/union17.q.out
===================================================================
--- ql/src/test/results/clientpositive/union17.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union17.q.out (working copy)
@@ -1,3 +1,13 @@
+query: drop table DEST1
+query: drop table DEST2
+query: CREATE TABLE DEST1(key STRING, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE DEST2(key STRING, val1 STRING, val2 STRING) STORED AS TEXTFILE
+query: explain
+FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
+ UNION ALL
+ select s2.key as key, s2.value as value from src s2) unionsrc
+INSERT OVERWRITE TABLE DEST1 SELECT unionsrc.key, COUNT(DISTINCT SUBSTR(unionsrc.value,5)) GROUP BY unionsrc.key
+INSERT OVERWRITE TABLE DEST2 SELECT unionsrc.key, unionsrc.value, COUNT(DISTINCT SUBSTR(unionsrc.value,5)) GROUP BY unionsrc.key, unionsrc.value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst1' key) (TOK_SELEXPR (TOK_FUNCTION TOK_STRING (TOK_FUNCTION count 1)) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) value) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (TOK_FUNCTIONDI COUNT (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL unionsrc) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL unionsrc) key))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) value)) (TOK_SELEXPR (TOK_FUNCTIONDI COUNT (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL unionsrc) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL unionsrc) key) (. (TOK_TABLE_OR_COL unionsrc) value))))
@@ -51,7 +61,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/834591745/194109585.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/130091110/149823007.10004
Union
Group By Operator
aggregations:
@@ -94,7 +104,7 @@
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
name: binary_table
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/834591745/194109585.10004
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/130091110/149823007.10006
Union
Group By Operator
aggregations:
@@ -179,7 +189,7 @@
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/834591745/194109585.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/130091110/149823007.10005
Reduce Output Operator
key expressions:
expr: 0
@@ -244,6 +254,17 @@
name: binary_table
+query: FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
+ UNION ALL
+ select s2.key as key, s2.value as value from src s2) unionsrc
+INSERT OVERWRITE TABLE DEST1 SELECT unionsrc.key, COUNT(DISTINCT SUBSTR(unionsrc.value,5)) GROUP BY unionsrc.key
+INSERT OVERWRITE TABLE DEST2 SELECT unionsrc.key, unionsrc.value, COUNT(DISTINCT SUBSTR(unionsrc.value,5)) GROUP BY unionsrc.key, unionsrc.value
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+query: SELECT DEST1.* FROM DEST1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/415341533/530713652.10000
0 1
10 1
100 1
@@ -554,6 +575,9 @@
97 1
98 1
tst1 0
+query: SELECT DEST2.* FROM DEST2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1221535477/121161713.10000
0 val_0 1
10 val_10 1
100 val_100 1
@@ -864,3 +888,5 @@
97 val_97 1
98 val_98 1
tst1 500 0
+query: drop table DEST1
+query: drop table DEST2
Index: ql/src/test/results/clientpositive/input11.q.out
===================================================================
--- ql/src/test/results/clientpositive/input11.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input11.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100))))
@@ -46,6 +50,13 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/204335901/94270263.10000
86 val_86
27 val_27
98 val_98
Index: ql/src/test/results/clientpositive/noalias_subq1.q.out
===================================================================
--- ql/src/test/results/clientpositive/noalias_subq1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/noalias_subq1.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT c1 FROM (select value as c1, key as c2 from src) x where c2 < 100
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL value) c1) (TOK_SELEXPR (TOK_TABLE_OR_COL key) c2)))) x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL c1))) (TOK_WHERE (< (TOK_TABLE_OR_COL c2) 100))))
@@ -36,6 +38,9 @@
limit: -1
+query: SELECT c1 FROM (select value as c1, key as c2 from src) x where c2 < 100
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/4961385/1570794008.10000
val_86
val_27
val_98
Index: ql/src/test/results/clientpositive/udf3.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf3.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf3.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT count(CAST('' AS INT)), sum(CAST('' AS INT)), avg(CAST('' AS INT)),
+min(CAST('' AS INT)), max(CAST('' AS INT))
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count (TOK_FUNCTION TOK_INT ''))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION TOK_INT ''))) (TOK_SELEXPR (TOK_FUNCTION avg (TOK_FUNCTION TOK_INT ''))) (TOK_SELEXPR (TOK_FUNCTION min (TOK_FUNCTION TOK_INT ''))) (TOK_SELEXPR (TOK_FUNCTION max (TOK_FUNCTION TOK_INT ''))))))
@@ -74,4 +78,11 @@
name: dest1
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT count(CAST('' AS INT)), sum(CAST('' AS INT)), avg(CAST('' AS INT)),
+min(CAST('' AS INT)), max(CAST('' AS INT))
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/441448894/123433014.10000
0 NULL NULL NULL NULL
Index: ql/src/test/results/clientpositive/groupby_bigdata.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_bigdata.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby_bigdata.q.out (working copy)
@@ -1 +1,5 @@
+query: select count(distinct subq.key) from
+(FROM src MAP src.key USING 'python ../data/scripts/dumpdata_script.py' AS key WHERE src.key = 10) subq
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/56403262/669115083.10000
1000022
Index: ql/src/test/results/clientpositive/input_testxpath.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_testxpath.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_testxpath.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(key INT, value STRING, mapvalue STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring, src_thrift.mstringstring['key_2']
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 1)) (TOK_SELEXPR (. ([ (. (TOK_TABLE_OR_COL src_thrift) lintstring) 0) mystring)) (TOK_SELEXPR ([ (. (TOK_TABLE_OR_COL src_thrift) mstringstring) 'key_2')))))
@@ -46,6 +50,13 @@
name: dest1
+query: FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring, src_thrift.mstringstring['key_2']
+Input: default/src_thrift
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/403047878/395453459.10000
0 0 NULL
2 1 NULL
4 8 value_2
Index: ql/src/test/results/clientpositive/groupby6_noskew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby6_noskew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby6_noskew.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,5,1)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECTDI (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5 1)))))
@@ -53,6 +57,13 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,5,1)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/510632824/360440652.10000
0
1
2
Index: ql/src/test/results/clientpositive/join14.q.out
===================================================================
--- ql/src/test/results/clientpositive/join14.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join14.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
+INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src) (TOK_TABREF srcpart) (and (AND (= (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL srcpart) key)) (= (. (TOK_TABLE_OR_COL srcpart) ds) '2008-04-08')) (> (. (TOK_TABLE_OR_COL src) key) 100)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) value)))))
@@ -98,6 +102,15 @@
name: dest1
+query: FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
+INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value
+Input: default/srcpart/ds=2008-04-08/hr=11
+Input: default/srcpart/ds=2008-04-08/hr=12
+Input: default/src
+Output: default/dest1
+query: select dest1.* from dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/176918026/1246507933.10000
103 val_103
103 val_103
103 val_103
Index: ql/src/test/results/clientpositive/input15.q.out
===================================================================
--- ql/src/test/results/clientpositive/input15.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input15.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
ABSTRACT SYNTAX TREE:
(TOK_CREATETABLE TEST15 (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEROWFORMAT (TOK_TABLEROWFORMATFIELD '\t')) TOK_TBLTEXTFILE)
@@ -18,5 +20,8 @@
isExternal: false
+query: CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
+query: DESCRIBE TEST15
key int
value string
+query: DROP TABLE TEST15
Index: ql/src/test/results/clientpositive/udf7.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf7.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf7.q.out (working copy)
@@ -1,3 +1,14 @@
+query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT ' abc ' WHERE src.key = 86
+Input: default/src
+Output: default/dest1
+query: EXPLAIN
+SELECT ROUND(LN(3.0),12), LN(0.0), LN(-1), ROUND(LOG(3.0),12), LOG(0.0),
+ LOG(-1), ROUND(LOG2(3.0),12), LOG2(0.0), LOG2(-1),
+ ROUND(LOG10(3.0),12), LOG10(0.0), LOG10(-1), ROUND(LOG(2, 3.0),12),
+ LOG(2, 0.0), LOG(2, -1), LOG(0.5, 2), LOG(2, 0.5), ROUND(EXP(2.0),12),
+ POW(2,3), POWER(2,3), POWER(2,-3), POWER(0.5, -3), POWER(4, 0.5),
+ POWER(-1, 0.5), POWER(-1, 2) FROM dest1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_FUNCTION LN 3.0) 12)) (TOK_SELEXPR (TOK_FUNCTION LN 0.0)) (TOK_SELEXPR (TOK_FUNCTION LN (- 1))) (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_FUNCTION LOG 3.0) 12)) (TOK_SELEXPR (TOK_FUNCTION LOG 0.0)) (TOK_SELEXPR (TOK_FUNCTION LOG (- 1))) (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_FUNCTION LOG2 3.0) 12)) (TOK_SELEXPR (TOK_FUNCTION LOG2 0.0)) (TOK_SELEXPR (TOK_FUNCTION LOG2 (- 1))) (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_FUNCTION LOG10 3.0) 12)) (TOK_SELEXPR (TOK_FUNCTION LOG10 0.0)) (TOK_SELEXPR (TOK_FUNCTION LOG10 (- 1))) (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_FUNCTION LOG 2 3.0) 12)) (TOK_SELEXPR (TOK_FUNCTION LOG 2 0.0)) (TOK_SELEXPR (TOK_FUNCTION LOG 2 (- 1))) (TOK_SELEXPR (TOK_FUNCTION LOG 0.5 2)) (TOK_SELEXPR (TOK_FUNCTION LOG 2 0.5)) (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_FUNCTION EXP 2.0) 12)) (TOK_SELEXPR (TOK_FUNCTION POW 2 3)) (TOK_SELEXPR (TOK_FUNCTION POWER 2 3)) (TOK_SELEXPR (TOK_FUNCTION POWER 2 (- 3))) (TOK_SELEXPR (TOK_FUNCTION POWER 0.5 (- 3))) (TOK_SELEXPR (TOK_FUNCTION POWER 4 0.5)) (TOK_SELEXPR (TOK_FUNCTION POWER (- 1) 0.5)) (TOK_SELEXPR (TOK_FUNCTION POWER (- 1) 2)))))
@@ -75,4 +86,12 @@
limit: -1
+query: SELECT ROUND(LN(3.0),12), LN(0.0), LN(-1), ROUND(LOG(3.0),12), LOG(0.0),
+ LOG(-1), ROUND(LOG2(3.0),12), LOG2(0.0), LOG2(-1),
+ ROUND(LOG10(3.0),12), LOG10(0.0), LOG10(-1), ROUND(LOG(2, 3.0),12),
+ LOG(2, 0.0), LOG(2, -1), LOG(0.5, 2), LOG(2, 0.5), ROUND(EXP(2.0),12),
+ POW(2,3), POWER(2,3), POWER(2,-3), POWER(0.5, -3), POWER(4, 0.5),
+ POWER(-1, 0.5), POWER(-1, 2) FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/323872951/193616753.10000
1.098612288668 NULL NULL 1.098612288668 NULL NULL 1.584962500721 NULL NULL 0.47712125472 NULL NULL 1.584962500721 NULL NULL NULL -1.0 7.389056098931 8.0 8.0 0.125 8.0 2.0 NaN 1.0
Index: ql/src/test/results/clientpositive/ppd_gby2.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_gby2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_gby2.q.out (working copy)
@@ -1,3 +1,9 @@
+query: EXPLAIN
+SELECT max(src1.c1), src1.c2
+FROM
+(SELECT src.value AS c1, count(src.key) AS c2 FROM src WHERE src.value > 'val_10' GROUP BY src.value) src1
+WHERE src1.c1 > 'val_200' AND (src1.c2 > 30 OR src1.c1 < 'val_400')
+GROUP BY src1.c2
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c1) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) key)) c2)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) value) 'val_10')) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) value)))) src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION max (. (TOK_TABLE_OR_COL src1) c1))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c2))) (TOK_WHERE (AND (> (. (TOK_TABLE_OR_COL src1) c1) 'val_200') (OR (> (. (TOK_TABLE_OR_COL src1) c2) 30) (< (. (TOK_TABLE_OR_COL src1) c1) 'val_400')))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src1) c2))))
@@ -74,7 +80,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/51382151/746919794.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/876834044/233635262.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -113,6 +119,13 @@
limit: -1
+query: SELECT max(src1.c1), src1.c2
+FROM
+(SELECT src.value AS c1, count(src.key) AS c2 FROM src WHERE src.value > 'val_10' GROUP BY src.value) src1
+WHERE src1.c1 > 'val_200' AND (src1.c2 > 30 OR src1.c1 < 'val_400')
+GROUP BY src1.c2
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1510230396/496559697.10000
val_4 1
val_399 2
val_396 3
Index: ql/src/test/results/clientpositive/input_part2.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_part2.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING) STORED AS TEXTFILE
+query: CREATE TABLE dest2(key INT, value STRING, hr STRING, ds STRING) STORED AS TEXTFILE
+query: EXPLAIN EXTENDED
+FROM srcpart
+INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12'
+INSERT OVERWRITE TABLE dest2 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-09' and srcpart.hr = '12'
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF srcpart)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) hr)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) ds))) (TOK_WHERE (and (and (< (. (TOK_TABLE_OR_COL srcpart) key) 100) (= (. (TOK_TABLE_OR_COL srcpart) ds) '2008-04-08')) (= (. (TOK_TABLE_OR_COL srcpart) hr) '12')))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) hr)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) ds))) (TOK_WHERE (and (and (< (. (TOK_TABLE_OR_COL srcpart) key) 100) (= (. (TOK_TABLE_OR_COL srcpart) ds) '2008-04-09')) (= (. (TOK_TABLE_OR_COL srcpart) hr) '12')))))
@@ -37,7 +43,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/587562230/341514108.10000.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/207197850/1525735017.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -51,7 +57,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
Filter Operator
@@ -81,7 +87,7 @@
File Output Operator
compressed: false
GlobalTableId: 2
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/587562230/341514108.10001.insclause-1
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/207197850/1525735017.10002.insclause-1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -95,15 +101,15 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest2
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest2
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest2
Needs Tagging: false
Path -> Alias:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-09
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
Path -> Partition:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
partition values:
ds 2008-04-08
@@ -122,10 +128,10 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-09
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
Partition
partition values:
ds 2008-04-09
@@ -144,7 +150,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
@@ -152,7 +158,7 @@
Move Operator
tables:
replace: true
- source: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/587562230/341514108.10000.insclause-0
+ source: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/207197850/1525735017.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -166,11 +172,12 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
+ tmp directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/207197850/1525735017.10001
replace: true
- source: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/587562230/341514108.10001.insclause-1
+ source: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/207197850/1525735017.10002.insclause-1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -184,11 +191,22 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest2
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest2
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest2
+ tmp directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/207197850/1525735017.10003
+query: FROM srcpart
+INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12'
+INSERT OVERWRITE TABLE dest2 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-09' and srcpart.hr = '12'
+Input: default/srcpart/ds=2008-04-08/hr=12
+Input: default/srcpart/ds=2008-04-09/hr=12
+Output: default/dest1
+Output: default/dest2
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/70176811/211325652.10000
86 val_86 12 2008-04-08
27 val_27 12 2008-04-08
98 val_98 12 2008-04-08
@@ -273,6 +291,9 @@
37 val_37 12 2008-04-08
90 val_90 12 2008-04-08
97 val_97 12 2008-04-08
+query: SELECT dest2.* FROM dest2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/683955421/596380888.10000
86 val_86 12 2008-04-09
27 val_27 12 2008-04-09
98 val_98 12 2008-04-09
@@ -357,3 +378,4 @@
37 val_37 12 2008-04-09
90 val_90 12 2008-04-09
97 val_97 12 2008-04-09
+query: drop table dest2
Index: ql/src/test/results/clientpositive/join18.q.out
===================================================================
--- ql/src/test/results/clientpositive/join18.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join18.q.out (working copy)
@@ -1,3 +1,15 @@
+query: EXPLAIN
+ SELECT a.key, a.value, b.key, b.value
+ FROM
+ (
+ SELECT src1.key as key, count(src1.value) AS value FROM src src1 group by src1.key
+ ) a
+ FULL OUTER JOIN
+ (
+ SELECT src2.key as key, count(distinct(src2.value)) AS value
+ FROM src1 src2 group by src2.key
+ ) b
+ ON (a.key = b.key)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_FULLOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) key) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src1) value)) value)) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src1) key)))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src1 src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) key) (TOK_SELEXPR (TOK_FUNCTIONDI count (. (TOK_TABLE_OR_COL src2) value)) value)) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src2) key)))) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value)))))
@@ -165,6 +177,20 @@
limit: -1
+query: SELECT a.key, a.value, b.key, b.value
+ FROM
+ (
+ SELECT src1.key as key, count(src1.value) AS value FROM src src1 group by src1.key
+ ) a
+ FULL OUTER JOIN
+ (
+ SELECT src2.key as key, count(distinct(src2.value)) AS value
+ FROM src1 src2 group by src2.key
+ ) b
+ ON (a.key = b.key)
+Input: default/src1
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/561260734/94732244.10000
NULL NULL 6
0 3 NULL NULL
10 1 NULL NULL
Index: ql/src/test/results/clientpositive/input19.q.out
===================================================================
--- ql/src/test/results/clientpositive/input19.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input19.q.out (working copy)
@@ -1 +1,8 @@
+query: drop table apachelog
+query: create table apachelog(ipaddress STRING,identd STRING,user STRING,finishtime STRING,requestline string,returncode INT,size INT) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES ( 'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol', 'quote.delim'= '("|\\[|\\])', 'field.delim'=' ', 'serialization.null.format'='-' ) STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/apache.access.log' INTO TABLE apachelog
+query: SELECT a.* FROM apachelog a
+Input: default/apachelog
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/102182399/358987266.10000
127.0.0.1 NULL frank 10/Oct/2000:13:55:36 -0700 GET /apache_pb.gif HTTP/1.0 200 2326
+query: drop table apachelog
Index: ql/src/test/results/clientpositive/groupby3.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby3.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby3.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT sum(substr(src.value,5)), avg(substr(src.value,5)), avg(DISTINCT substr(src.value,5)), max(substr(src.value,5)), min(substr(src.value,5))
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION avg (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTIONDI avg (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION max (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION min (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))))))
@@ -44,7 +48,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/1903056632/101880932.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/736681081/169179727.10002
Reduce Output Operator
sort order:
tag: -1
@@ -112,4 +116,11 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT sum(substr(src.value,5)), avg(substr(src.value,5)), avg(DISTINCT substr(src.value,5)), max(substr(src.value,5)), min(substr(src.value,5))
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/47601886/767510798.10000
130091.0 260.182 256.10355987055016 98.0 0.0
Index: ql/src/test/results/clientpositive/subq.q.out
===================================================================
--- ql/src/test/results/clientpositive/subq.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/subq.q.out (working copy)
@@ -1,3 +1,8 @@
+query: EXPLAIN
+FROM (
+ FROM src select src.* WHERE src.key < 100
+) unioninput
+INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)))) unioninput)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR '../build/ql/test/data/warehouse/union.out')) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF unioninput)))))
@@ -40,6 +45,12 @@
destination: ../build/ql/test/data/warehouse/union.out
+query: FROM (
+ FROM src select src.* WHERE src.key < 100
+) unioninput
+INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*
+Input: default/src
+Output: ../build/ql/test/data/warehouse/union.out
86val_86
27val_27
98val_98
Index: ql/src/test/results/clientpositive/union2.q.out
===================================================================
--- ql/src/test/results/clientpositive/union2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union2.q.out (working copy)
@@ -1,3 +1,6 @@
+query: explain
+ select count(1) FROM (select s1.key as key, s1.value as value from src s1 UNION ALL
+ select s2.key as key, s2.value as value from src s2) unionsrc
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s1) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s1) value) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) value) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)))))
@@ -66,4 +69,8 @@
limit: -1
+query: select count(1) FROM (select s1.key as key, s1.value as value from src s1 UNION ALL
+ select s2.key as key, s2.value as value from src s2) unionsrc
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/403137725/387325757.10000
1000
Index: ql/src/test/results/clientpositive/input_part6.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part6.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_part6.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT x.* FROM SRCPART x WHERE x.ds = 2008-04-08 LIMIT 10
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) ds) (- (- 2008 04) 08))) (TOK_LIMIT 10)))
@@ -37,3 +39,9 @@
limit: 10
+query: SELECT x.* FROM SRCPART x WHERE x.ds = 2008-04-08 LIMIT 10
+Input: default/srcpart/ds=2008-04-08/hr=11
+Input: default/srcpart/ds=2008-04-08/hr=12
+Input: default/srcpart/ds=2008-04-09/hr=11
+Input: default/srcpart/ds=2008-04-09/hr=12
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/179055439/17842475.10000
Index: ql/src/test/results/clientpositive/nullgroup4.q.out
===================================================================
--- ql/src/test/results/clientpositive/nullgroup4.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/nullgroup4.q.out (working copy)
@@ -1,3 +1,5 @@
+query: explain
+select count(1), count(distinct x.value) from src x where x.key = 9999
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (. (TOK_TABLE_OR_COL x) value)))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 9999))))
@@ -54,7 +56,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/170706765/632939441.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/197465075/17827977.10002
Reduce Output Operator
sort order:
tag: -1
@@ -87,7 +89,12 @@
limit: -1
+query: select count(1), count(distinct x.value) from src x where x.key = 9999
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/41184047/371297068.10000
0 0
+query: explain
+select count(1), count(distinct x.value) from src x where x.key = 9999
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (. (TOK_TABLE_OR_COL x) value)))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 9999))))
@@ -147,7 +154,12 @@
limit: -1
+query: select count(1), count(distinct x.value) from src x where x.key = 9999
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/23213601/1066583085.10000
0 0
+query: explain
+select count(1), count(distinct x.value) from src x where x.key = 9999
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (. (TOK_TABLE_OR_COL x) value)))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 9999))))
@@ -194,7 +206,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/1436596988/17006007.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/748002219/277619044.10002
Reduce Output Operator
sort order:
tag: -1
@@ -227,7 +239,12 @@
limit: -1
+query: select count(1), count(distinct x.value) from src x where x.key = 9999
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/465677507/154588518.10000
0 0
+query: explain
+select count(1), count(distinct x.value) from src x where x.key = 9999
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (. (TOK_TABLE_OR_COL x) value)))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 9999))))
@@ -277,4 +294,7 @@
limit: -1
+query: select count(1), count(distinct x.value) from src x where x.key = 9999
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1165280510/243543412.10000
0 0
Index: ql/src/test/results/clientpositive/groupby7.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby7.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby7.q.out (working copy)
@@ -1,3 +1,14 @@
+query: CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE
+query: FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+query: SELECT DEST1.* FROM DEST1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/533394424/23653585.10000
0 0.0
10 10.0
100 200.0
@@ -307,6 +318,9 @@
96 96.0
97 194.0
98 196.0
+query: SELECT DEST2.* FROM DEST2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1095125127/364198571.10000
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/union6.q.out
===================================================================
--- ql/src/test/results/clientpositive/union6.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union6.q.out (working copy)
@@ -1,3 +1,10 @@
+query: drop table tmptable
+query: create table tmptable(key string, value string)
+query: explain
+insert overwrite table tmptable
+ select unionsrc.key, unionsrc.value FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
+ UNION ALL
+ select s2.key as key, s2.value as value from src1 s2) unionsrc
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst1' key) (TOK_SELEXPR (TOK_FUNCTION TOK_STRING (TOK_FUNCTION count 1)) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src1 s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) value) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB tmptable)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) value)))))
@@ -47,7 +54,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/802711575/169350300.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/521174997/29191949.10002
Union
Select Operator
expressions:
@@ -63,7 +70,7 @@
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: tmptable
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/802711575/169350300.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/521174997/29191949.10003
Union
Select Operator
expressions:
@@ -109,6 +116,16 @@
name: binary_table
+query: insert overwrite table tmptable
+select unionsrc.key, unionsrc.value FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
+ UNION ALL
+ select s2.key as key, s2.value as value from src1 s2) unionsrc
+Input: default/src
+Input: default/src1
+Output: default/tmptable
+query: select * from tmptable x sort by x.key
+Input: default/tmptable
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1147707423/406168765.10000
val_193
@@ -135,3 +152,4 @@
66 val_66
98 val_98
tst1 500
+query: drop table tmptable
Index: ql/src/test/results/clientpositive/ppd_transform.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_transform.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_transform.q.out (working copy)
@@ -1,3 +1,11 @@
+query: EXPLAIN
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value)
+ USING '/bin/cat' AS (tkey, tvalue)
+ CLUSTER BY tkey
+) tmap
+SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100))))
@@ -59,6 +67,15 @@
limit: -1
+query: FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value)
+ USING '/bin/cat' AS (tkey, tvalue)
+ CLUSTER BY tkey
+) tmap
+SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/872181982/534231229.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/udf_testlength.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_testlength.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf_testlength.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+CREATE TEMPORARY FUNCTION testlength AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength'
ABSTRACT SYNTAX TREE:
(TOK_CREATEFUNCTION testlength 'org.apache.hadoop.hive.ql.udf.UDFTestLength')
@@ -8,6 +10,14 @@
Stage: Stage-0
+query: CREATE TEMPORARY FUNCTION testlength AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength'
+query: CREATE TABLE dest1(len INT)
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT testlength(src.value)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/282300809/1100710033.10000
7
6
7
Index: ql/src/test/results/clientpositive/mapreduce1.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapreduce1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/mapreduce1.q.out (working copy)
@@ -1,3 +1,11 @@
+query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+DISTRIBUTE BY tvalue, tkey
+SORT BY ten, one
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ten)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL one)))))
@@ -79,6 +87,17 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+DISTRIBUTE BY tvalue, tkey
+SORT BY ten, one
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/215905920/1186087609.10000
0 0 0 val_0
0 0 0 val_0
0 0 0 val_0
Index: ql/src/test/results/clientpositive/alter1.q.out
===================================================================
--- ql/src/test/results/clientpositive/alter1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/alter1.q.out (working copy)
@@ -1,31 +1,49 @@
+query: drop table alter1
+query: create table alter1(a int, b int)
+query: describe extended alter1
a int
b int
-Detailed Table Information Table(tableName:alter1,dbName:default,owner:rmurthy,createTime:1238029930,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+Detailed Table Information Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+query: alter table alter1 set tblproperties ('a'='1', 'c'='3')
+query: describe extended alter1
a int
b int
-Detailed Table Information Table(tableName:alter1,dbName:default,owner:rmurthy,createTime:1238029930,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{last_modified_by=rmurthy,c=3,last_modified_time=1238029930,a=1})
+Detailed Table Information Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{last_modified_by=athusoo,c=3,last_modified_time=1241277624,a=1})
+query: alter table alter1 set tblproperties ('a'='1', 'c'='4', 'd'='3')
+query: describe extended alter1
a int
b int
-Detailed Table Information Table(tableName:alter1,dbName:default,owner:rmurthy,createTime:1238029930,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=rmurthy,c=4,last_modified_time=1238029930,a=1})
+Detailed Table Information Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})
+query: alter table alter1 set serdeproperties('s1'='9')
+query: describe extended alter1
a int
b int
-Detailed Table Information Table(tableName:alter1,dbName:default,owner:rmurthy,createTime:1238029930,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=rmurthy,c=4,last_modified_time=1238029930,a=1})
+Detailed Table Information Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})
+query: alter table alter1 set serdeproperties('s1'='10', 's2' ='20')
+query: describe extended alter1
a int
b int
-Detailed Table Information Table(tableName:alter1,dbName:default,owner:rmurthy,createTime:1238029930,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s2=20,s1=10,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=rmurthy,c=4,last_modified_time=1238029930,a=1})
+Detailed Table Information Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s2=20,s1=10,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})
+query: alter table alter1 set serde 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties('s1'='9')
+query: describe extended alter1
a string from deserializer
b string from deserializer
-Detailed Table Information Table(tableName:alter1,dbName:default,owner:rmurthy,createTime:1238029930,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.TestSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=rmurthy,c=4,last_modified_time=1238029930,a=1})
+Detailed Table Information Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.TestSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})
+query: alter table alter1 set serde 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe'
+query: describe extended alter1
a string from deserializer
b string from deserializer
-Detailed Table Information Table(tableName:alter1,dbName:default,owner:rmurthy,createTime:1238029930,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=rmurthy,c=4,last_modified_time=1238029930,a=1})
+Detailed Table Information Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})
+query: alter table alter1 replace columns (a int, b int, c string)
+query: describe alter1
a int
b int
c string
+query: drop table alter1
Index: ql/src/test/results/clientpositive/mapreduce5.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapreduce5.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/mapreduce5.q.out (working copy)
@@ -1,3 +1,10 @@
+query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+SELECT src.key as c1, CAST(src.key / 10 AS INT) as c2, CAST(src.key % 10 AS INT) as c3, src.value as c4
+DISTRIBUTE BY c4, c1
+SORT BY c2 DESC, c3 ASC
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) c2) (TOK_SELEXPR (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL c4) (TOK_TABLE_OR_COL c1)) (TOK_SORTBY (TOK_TABSORTCOLNAMEDESC (TOK_TABLE_OR_COL c2)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL c3)))))
@@ -74,6 +81,16 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1
+SELECT src.key as c1, CAST(src.key / 10 AS INT) as c2, CAST(src.key % 10 AS INT) as c3, src.value as c4
+DISTRIBUTE BY c4, c1
+SORT BY c2 DESC, c3 ASC
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/38926801/280803016.10000
490 49 0 val_490
491 49 1 val_491
492 49 2 val_492
Index: ql/src/test/results/clientpositive/subq2.q.out
===================================================================
--- ql/src/test/results/clientpositive/subq2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/subq2.q.out (working copy)
@@ -1,3 +1,7 @@
+query: EXPLAIN
+SELECT a.k, a.c
+FROM (SELECT b.key as k, count(1) as c FROM src b GROUP BY b.key) a
+WHERE a.k >= 90
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src b)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key) k) (TOK_SELEXPR (TOK_FUNCTION count 1) c)) (TOK_GROUPBY (. (TOK_TABLE_OR_COL b) key)))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) k)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c))) (TOK_WHERE (>= (. (TOK_TABLE_OR_COL a) k) 90))))
@@ -69,6 +73,11 @@
limit: -1
+query: SELECT a.k, a.c
+FROM (SELECT b.key as k, count(1) as c FROM src b GROUP BY b.key) a
+WHERE a.k >= 90
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/900838886/139366063.10000
100 2
103 2
104 2
Index: ql/src/test/results/clientpositive/input_limit.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_limit.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_limit.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT x.* FROM SRC x LIMIT 20
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_LIMIT 20)))
@@ -10,6 +12,9 @@
limit: 20
+query: SELECT x.* FROM SRC x LIMIT 20
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/97124224/82396056.10000
238 val_238
86 val_86
311 val_311
Index: ql/src/test/results/clientpositive/groupby3_noskew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby3_noskew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby3_noskew.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT sum(substr(src.value,5)), avg(substr(src.value,5)), avg(DISTINCT substr(src.value,5)), max(substr(src.value,5)), min(substr(src.value,5))
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION avg (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTIONDI avg (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION max (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION min (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))))))
@@ -73,4 +77,11 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT sum(substr(src.value,5)), avg(substr(src.value,5)), avg(DISTINCT substr(src.value,5)), max(substr(src.value,5)), min(substr(src.value,5))
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/38046058/1730327.10000
130091.0 260.182 256.10355987055016 98.0 0.0
Index: ql/src/test/results/clientpositive/input20.q.out
===================================================================
--- ql/src/test/results/clientpositive/input20.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input20.q.out (working copy)
@@ -1,3 +1,16 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM src
+ MAP src.key, src.key
+ USING 'cat'
+ DISTRIBUTE BY key
+ SORT BY key, value
+) tmap
+INSERT OVERWRITE TABLE dest1
+REDUCE tmap.key, tmap.value
+USING '../data/scripts/input20_script'
+AS key, value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) key)) 'cat'))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL tmap) key) (. (TOK_TABLE_OR_COL tmap) value)) '../data/scripts/input20_script' (TOK_ALIASLIST key value))))))
@@ -80,6 +93,22 @@
name: dest1
+query: FROM (
+ FROM src
+ MAP src.key, src.key
+ USING 'cat'
+ DISTRIBUTE BY key
+ SORT BY key, value
+) tmap
+INSERT OVERWRITE TABLE dest1
+REDUCE tmap.key, tmap.value
+USING '../data/scripts/input20_script'
+AS key, value
+Input: default/src
+Output: default/dest1
+query: SELECT * FROM dest1 SORT BY key, value
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1477442306/994995165.10000
1 105_105
1 10_10
1 111_111
Index: ql/src/test/results/clientpositive/input14_limit.q.out
===================================================================
--- ql/src/test/results/clientpositive/input14_limit.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input14_limit.q.out (working copy)
@@ -1,3 +1,12 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value)
+ USING '/bin/cat' AS (tkey, tvalue)
+ CLUSTER BY tkey LIMIT 20
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)) (TOK_LIMIT 20))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100))))
@@ -50,7 +59,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/1749790982/29069662.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/760042759/650000990.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -104,6 +113,18 @@
name: dest1
+query: FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value)
+ USING '/bin/cat' AS (tkey, tvalue)
+ CLUSTER BY tkey LIMIT 20
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/27339988/227404504.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/sample2.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/sample2.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2) s
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 2) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s)))))
@@ -25,7 +29,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: /Users/char/Documents/workspace/Hive/ql/../build/ql/tmp/147325358/1360447386.10000.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/455891863/11091653.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -39,14 +43,14 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
Needs Tagging: false
Path -> Alias:
- file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/srcbucket/kv1.txt
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket/kv1.txt
Path -> Partition:
- file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/srcbucket/kv1.txt
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket/kv1.txt
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -62,7 +66,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/srcbucket
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket
@@ -70,7 +74,7 @@
Move Operator
tables:
replace: true
- source: /Users/char/Documents/workspace/Hive/ql/../build/ql/tmp/147325358/1360447386.10000.insclause-0
+ source: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/455891863/11091653.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -84,11 +88,19 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/char/Documents/workspace/Hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
+ tmp directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/455891863/11091653.10001
+query: INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2) s
+Input: default/srcbucket
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/445858413/37931330.10000
238 val_238
86 val_86
311 val_311
Index: ql/src/test/results/clientpositive/inputddl1.q.out
===================================================================
--- ql/src/test/results/clientpositive/inputddl1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/inputddl1.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+CREATE TABLE INPUTDDL1(key INT, value STRING) STORED AS TEXTFILE
ABSTRACT SYNTAX TREE:
(TOK_CREATETABLE INPUTDDL1 (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) TOK_TBLTEXTFILE)
@@ -17,3 +19,8 @@
isExternal: false
+query: CREATE TABLE INPUTDDL1(key INT, value STRING) STORED AS TEXTFILE
+query: SELECT INPUTDDL1.* from INPUTDDL1
+Input: default/inputddl1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/41830805/37982921.10000
+query: DROP TABLE INPUTDDL1
Index: ql/src/test/results/clientpositive/ppd_outer_join4.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_outer_join4.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_outer_join4.q.out (working copy)
@@ -1,3 +1,14 @@
+query: EXPLAIN
+ FROM
+ src a
+ LEFT OUTER JOIN
+ src b
+ ON (a.key = b.key)
+ RIGHT OUTER JOIN
+ src c
+ ON (a.key = c.key)
+ SELECT a.key, a.value, b.key, b.value, c.key
+ WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_LEFTOUTERJOIN (TOK_TABREF src a) (TOK_TABREF src b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF src c) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) key))) (TOK_WHERE (AND (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) '10') (< (. (TOK_TABLE_OR_COL a) key) '20')) (> (. (TOK_TABLE_OR_COL b) key) '15')) (< (. (TOK_TABLE_OR_COL b) key) '25')) (<> (TOK_FUNCTION sqrt (. (TOK_TABLE_OR_COL c) key)) 13)))))
@@ -93,6 +104,18 @@
limit: -1
+query: FROM
+ src a
+ LEFT OUTER JOIN
+ src b
+ ON (a.key = b.key)
+ RIGHT OUTER JOIN
+ src c
+ ON (a.key = c.key)
+ SELECT a.key, a.value, b.key, b.value, c.key
+ WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/162901485/37973931.10000
150 val_150 150 val_150 150
152 val_152 152 val_152 152
152 val_152 152 val_152 152
Index: ql/src/test/results/clientpositive/join23.q.out
===================================================================
--- ql/src/test/results/clientpositive/join23.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join23.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT * FROM src src1 JOIN src src2 WHERE src1.key < 10 and src2.key < 10 SORT BY src1.key, src1.value, src2.key, src2.value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src src1) (TOK_TABREF src src2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (< (. (TOK_TABLE_OR_COL src1) key) 10) (< (. (TOK_TABLE_OR_COL src2) key) 10))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src1) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src1) value)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src2) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src2) value)))))
@@ -60,7 +62,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/197273567/372508188.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/202047184/593821135.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -96,6 +98,9 @@
limit: -1
+query: SELECT * FROM src src1 JOIN src src2 WHERE src1.key < 10 and src2.key < 10 SORT BY src1.key, src1.value, src2.key, src2.value
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/43132946/67731968.10000
0 val_0 0 val_0
0 val_0 0 val_0
0 val_0 0 val_0
Index: ql/src/test/results/clientpositive/implicit_cast1.q.out
===================================================================
--- ql/src/test/results/clientpositive/implicit_cast1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/implicit_cast1.q.out (working copy)
@@ -1,3 +1,8 @@
+query: CREATE TABLE implicit_test1(a BIGINT, b STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES('serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol') STORED AS TEXTFILE
+query: EXPLAIN
+SELECT implicit_test1.*
+FROM implicit_test1
+WHERE implicit_test1.a <> 0
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF implicit_test1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF implicit_test1))) (TOK_WHERE (<> (. (TOK_TABLE_OR_COL implicit_test1) a) 0))))
@@ -32,3 +37,9 @@
limit: -1
+query: SELECT implicit_test1.*
+FROM implicit_test1
+WHERE implicit_test1.a <> 0
+Input: default/implicit_test1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/253022585/673360259.10000
+query: DROP TABLE implicit_test1
Index: ql/src/test/results/clientpositive/groupby8_map_skew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby8_map_skew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby8_map_skew.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTIONDI COUNT (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTIONDI COUNT (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))))
@@ -74,7 +80,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/144805361/525191903.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/274046296/767130723.10004
Reduce Output Operator
key expressions:
expr: 0
@@ -135,7 +141,7 @@
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/144805361/525191903.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/274046296/767130723.10005
Reduce Output Operator
key expressions:
expr: 0
@@ -171,7 +177,7 @@
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/144805361/525191903.10004
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/274046296/767130723.10006
Reduce Output Operator
key expressions:
expr: 0
@@ -214,6 +220,15 @@
name: dest2
+query: FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+query: SELECT DEST1.* FROM DEST1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/589113181/576499023.10000
0 1
10 1
100 1
@@ -523,6 +538,9 @@
96 1
97 1
98 1
+query: SELECT DEST2.* FROM DEST2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/989060991/1124156593.10000
0 1
10 1
100 1
Index: ql/src/test/results/clientpositive/sample6.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample6.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/sample6.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 4 on key) s
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 4 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s)))))
@@ -29,7 +33,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/65038510/91739698.10000.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/734546404/326766477.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -43,14 +47,14 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
Needs Tagging: false
Path -> Alias:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcbucket/kv1.txt
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket/kv1.txt
Path -> Partition:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcbucket/kv1.txt
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket/kv1.txt
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -66,7 +70,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcbucket
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket
@@ -74,7 +78,7 @@
Move Operator
tables:
replace: true
- source: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/65038510/91739698.10000.insclause-0
+ source: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/734546404/326766477.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -88,11 +92,19 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
+ tmp directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/734546404/326766477.10001
+query: INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 4 on key) s
+Input: default/srcbucket
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/630592516/317470973.10000
165 val_165
484 val_484
150 val_150
Index: ql/src/test/results/clientpositive/inputddl5.q.out
===================================================================
--- ql/src/test/results/clientpositive/inputddl5.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/inputddl5.q.out (working copy)
@@ -1,3 +1,13 @@
+query: CREATE TABLE INPUTDDL5(name STRING) STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE INPUTDDL5
+query: DESCRIBE INPUTDDL5
name string
+query: SELECT INPUTDDL5.name from INPUTDDL5
+Input: default/inputddl5
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/15516876/1568818554.10000
邵铮
+query: SELECT count(1) FROM INPUTDDL5 WHERE INPUTDDL5.name = _UTF-8 0xE982B5E993AE
+Input: default/inputddl5
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/106730807/9468444.10000
1
+query: DROP TABLE INPUTDDL5
Index: ql/src/test/results/clientpositive/groupby4_map.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby4_map.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby4_map.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE dest1(key INT) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT count(1)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)))))
@@ -54,4 +57,10 @@
name: dest1
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT count(1)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/799987544/541652894.10000
500
Index: ql/src/test/results/clientpositive/groupby8_map.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby8_map.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby8_map.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTIONDI COUNT (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTIONDI COUNT (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))))
@@ -99,7 +105,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/236623281/798335883.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1049333631/1099366906.10004
Reduce Output Operator
key expressions:
expr: 0
@@ -144,6 +150,15 @@
name: dest2
+query: FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+query: SELECT DEST1.* FROM DEST1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1585620805/348095372.10000
0 1
10 1
100 1
@@ -453,6 +468,9 @@
96 1
97 1
98 1
+query: SELECT DEST2.* FROM DEST2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/12995442/81385789.10000
0 1
10 1
100 1
Index: ql/src/test/results/clientpositive/input0.q.out
===================================================================
--- ql/src/test/results/clientpositive/input0.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input0.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT * FROM src
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
@@ -10,6 +12,9 @@
limit: -1
+query: SELECT * FROM src
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/47962917/505914661.10000
238 val_238
86 val_86
311 val_311
Index: ql/src/test/results/clientpositive/udf_length.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_length.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf_length.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE dest1(len INT)
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT length(src.value)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION length (. (TOK_TABLE_OR_COL src) value))))))
@@ -38,6 +41,12 @@
name: dest1
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT length(src.value)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/432923810/244566699.10000
7
6
7
Index: ql/src/test/results/clientpositive/join3.q.out
===================================================================
--- ql/src/test/results/clientpositive/join3.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join3.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key = src3.key)
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src3.value
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_TABREF src src1) (TOK_TABREF src src2) (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key))) (TOK_TABREF src src3) (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src3) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src3) value)))))
@@ -98,6 +102,13 @@
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
+query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key = src3.key)
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src3.value
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/436759319/410489049.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/input4.q.out
===================================================================
--- ql/src/test/results/clientpositive/input4.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input4.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
+query: EXPLAIN
+LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4
ABSTRACT SYNTAX TREE:
(TOK_LOAD '../data/files/kv1.txt' (TOK_TAB INPUT4) LOCAL)
@@ -8,8 +11,8 @@
STAGE PLANS:
Stage: Stage-0
Copy
- source: file:/Users/char/Documents/workspace/Hive/data/files/kv1.txt
- destination: file:/Users/char/Documents/workspace/Hive/ql/../build/ql/tmp/-813016988
+ source: file:/data/users/athusoo/commits/hive_trunk_ws8/data/files/kv1.txt
+ destination: file:/data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1051715073
Stage: Stage-1
Move Operator
@@ -22,6 +25,10 @@
name: input4
+query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4
+query: SELECT INPUT4.VALUE, INPUT4.KEY FROM INPUT4
+Input: default/input4
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/4047701/256327496.10000
val_238 238
val_86 86
val_311 311
@@ -522,3 +529,4 @@
val_400 400
val_200 200
val_97 97
+query: DROP TABLE INPUT4
Index: ql/src/test/results/clientpositive/ppd_random.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_random.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_random.q.out (working copy)
@@ -1,3 +1,11 @@
+query: EXPLAIN
+SELECT src1.c1, src2.c4
+FROM
+(SELECT src.key as c1, src.value as c2 from src ) src1
+JOIN
+(SELECT src.key as c3, src.value as c4 from src where src.key > '2' ) src2
+ON src1.c1 = src2.c3
+WHERE rand() > 0.5
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c2)))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) '2')))) src2) (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) c4))) (TOK_WHERE (> (TOK_FUNCTION rand) 0.5))))
Index: ql/src/test/results/clientpositive/input_testxpath2.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_testxpath2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_testxpath2.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(lint_size INT, lintstring_size INT, mstringstring_size INT) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring) where src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION size (. (TOK_TABLE_OR_COL src_thrift) lint))) (TOK_SELEXPR (TOK_FUNCTION size (. (TOK_TABLE_OR_COL src_thrift) lintstring))) (TOK_SELEXPR (TOK_FUNCTION size (. (TOK_TABLE_OR_COL src_thrift) mstringstring)))) (TOK_WHERE (AND (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL src_thrift) lint)) (NOT (TOK_FUNCTION TOK_ISNULL (. (TOK_TABLE_OR_COL src_thrift) mstringstring)))))))
@@ -50,6 +54,13 @@
name: dest1
+query: FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring) where src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL)
+Input: default/src_thrift
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/627000844/1811724340.10000
3 1 1
3 1 1
3 1 1
Index: ql/src/test/results/clientpositive/join7.q.out
===================================================================
--- ql/src/test/results/clientpositive/join7.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join7.q.out (working copy)
@@ -1,3 +1,23 @@
+query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING, c5 INT, c6 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ FULL OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ LEFT OUTER JOIN
+ (
+ FROM src src3 SELECT src3.key AS c5, src3.value AS c6 WHERE src3.key > 20 and src3.key < 25
+ ) c
+ ON (a.c1 = c.c5)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4, c.c5 AS c5, c.c6 AS c6
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4, c.c5, c.c6
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_FULLOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) c2)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) key) 10) (< (. (TOK_TABLE_OR_COL src1) key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) c4)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src2) key) 15) (< (. (TOK_TABLE_OR_COL src2) key) 25))))) b) (= (. (TOK_TABLE_OR_COL a) c1) (. (TOK_TABLE_OR_COL b) c3))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src3)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src3) key) c5) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src3) value) c6)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src3) key) 20) (< (. (TOK_TABLE_OR_COL src3) key) 25))))) c) (= (. (TOK_TABLE_OR_COL a) c1) (. (TOK_TABLE_OR_COL c) c5)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c1) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c2) c2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c3) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c4) c4) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c5) c5) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c6) c6)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c3)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c4)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c5)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c6)))))
@@ -155,6 +175,29 @@
name: dest1
+query: FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ FULL OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ LEFT OUTER JOIN
+ (
+ FROM src src3 SELECT src3.key AS c5, src3.value AS c6 WHERE src3.key > 20 and src3.key < 25
+ ) c
+ ON (a.c1 = c.c5)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4, c.c5 AS c5, c.c6 AS c6
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4, c.c5, c.c6
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/934204295/128575675.10000
11 val_11 NULL NULL NULL NULL
12 val_12 NULL NULL NULL NULL
12 val_12 NULL NULL NULL NULL
Index: ql/src/test/results/clientpositive/input8.q.out
===================================================================
--- ql/src/test/results/clientpositive/input8.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input8.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(c1 STRING, c2 INT, c3 DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT 4 + NULL, src1.key - NULL, NULL + NULL
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src1)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (+ 4 TOK_NULL)) (TOK_SELEXPR (- (. (TOK_TABLE_OR_COL src1) key) TOK_NULL)) (TOK_SELEXPR (+ TOK_NULL TOK_NULL)))))
@@ -50,6 +54,13 @@
name: dest1
+query: FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT 4 + NULL, src1.key - NULL, NULL + NULL
+Input: default/src1
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/746725763/14447488.10000
NULL NULL NULL
NULL NULL NULL
NULL NULL NULL
Index: ql/src/test/results/clientpositive/union.q.out
===================================================================
--- ql/src/test/results/clientpositive/union.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union.q.out (working copy)
@@ -1,3 +1,10 @@
+query: EXPLAIN
+FROM (
+ FROM src select src.key, src.value WHERE src.key < 100
+ UNION ALL
+ FROM src SELECT src.* WHERE src.key > 100
+) unioninput
+INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) 100))))) unioninput)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR '../build/ql/test/data/warehouse/union.out')) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF unioninput)))))
@@ -65,6 +72,14 @@
destination: ../build/ql/test/data/warehouse/union.out
+query: FROM (
+ FROM src select src.key, src.value WHERE src.key < 100
+ UNION ALL
+ FROM src SELECT src.* WHERE src.key > 100
+) unioninput
+INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*
+Input: default/src
+Output: ../build/ql/test/data/warehouse/union.out
238val_238
86val_86
311val_311
Index: ql/src/test/results/clientpositive/union12.q.out
===================================================================
--- ql/src/test/results/clientpositive/union12.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union12.q.out (working copy)
@@ -1,3 +1,12 @@
+query: drop table tmptable
+query: create table tmptable(key string, value int)
+query: explain
+insert overwrite table tmptable
+ select unionsrc.key, unionsrc.value FROM (select 'tst1' as key, count(1) as value from src s1
+ UNION ALL
+ select 'tst2' as key, count(1) as value from src1 s2
+ UNION ALL
+ select 'tst3' as key, count(1) as value from srcbucket s3) unionsrc
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst1' key) (TOK_SELEXPR (TOK_FUNCTION count 1) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src1 s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst2' key) (TOK_SELEXPR (TOK_FUNCTION count 1) value))))) (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket s3)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst3' key) (TOK_SELEXPR (TOK_FUNCTION count 1) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB tmptable)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) value)))))
@@ -50,7 +59,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/909488463/1493610102.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1200507308/759713067.10002
Union
Select Operator
expressions:
@@ -72,7 +81,7 @@
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: tmptable
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/909488463/1493610102.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1200507308/759713067.10003
Union
Select Operator
expressions:
@@ -94,7 +103,7 @@
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: tmptable
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/909488463/1493610102.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1200507308/759713067.10004
Union
Select Operator
expressions:
@@ -196,6 +205,20 @@
name: binary_table
+query: insert overwrite table tmptable
+ select unionsrc.key, unionsrc.value FROM (select 'tst1' as key, count(1) as value from src s1
+ UNION ALL
+ select 'tst2' as key, count(1) as value from src1 s2
+ UNION ALL
+ select 'tst3' as key, count(1) as value from srcbucket s3) unionsrc
+Input: default/src
+Input: default/src1
+Input: default/srcbucket
+Output: default/tmptable
+query: select * from tmptable x sort by x.key
+Input: default/tmptable
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/401912347/42136443.10000
tst1 500
tst2 25
tst3 1000
+query: drop table tmptable
Index: ql/src/test/results/clientpositive/udf_10_trims.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_10_trims.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf_10_trims.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+INSERT OVERWRITE TABLE dest1
+SELECT trim(trim(trim(trim(trim(trim(trim(trim(trim(trim( ' abc '))))))))))
+FROM src
+WHERE src.key = 86
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION trim (TOK_FUNCTION trim (TOK_FUNCTION trim (TOK_FUNCTION trim (TOK_FUNCTION trim (TOK_FUNCTION trim (TOK_FUNCTION trim (TOK_FUNCTION trim (TOK_FUNCTION trim (TOK_FUNCTION trim ' abc ')))))))))))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL src) key) 86))))
@@ -42,3 +48,9 @@
name: dest1
+query: INSERT OVERWRITE TABLE dest1
+SELECT trim(trim(trim(trim(trim(trim(trim(trim(trim(trim( ' abc '))))))))))
+FROM src
+WHERE src.key = 86
+Input: default/src
+Output: default/dest1
Index: ql/src/test/results/clientpositive/groupby1_map_nomap.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby1_map_nomap.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby1_map_nomap.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) key))))
@@ -69,6 +72,12 @@
name: dest1
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/146895803/933309003.10000
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/union16.q.out
===================================================================
--- ql/src/test/results/clientpositive/union16.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union16.q.out (working copy)
@@ -1,3 +1,34 @@
+query: EXPLAIN
+SELECT count(1) FROM (
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src) src
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))) src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)))))
@@ -480,4 +511,36 @@
limit: -1
+query: SELECT count(1) FROM (
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src UNION ALL
+ SELECT key, value FROM src) src
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/378068546/1140440760.10000
12500
Index: ql/src/test/results/clientpositive/input10.q.out
===================================================================
--- ql/src/test/results/clientpositive/input10.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input10.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE TEST10(key INT, value STRING) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE
+query: EXPLAIN
+DESCRIBE TEST10
ABSTRACT SYNTAX TREE:
(TOK_DESCTABLE (TOK_TABTYPE TEST10))
@@ -16,7 +19,9 @@
limit: -1
+query: DESCRIBE TEST10
key int
value string
ds string
hr string
+query: DROP TABLE TEST10
Index: ql/src/test/results/clientpositive/udf2.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf2.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT ' abc ' WHERE src.key = 86
+Input: default/src
+Output: default/dest1
+query: EXPLAIN
+SELECT '|', trim(dest1.c1), '|', rtrim(dest1.c1), '|', ltrim(dest1.c1), '|' FROM dest1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR '|') (TOK_SELEXPR (TOK_FUNCTION trim (. (TOK_TABLE_OR_COL dest1) c1))) (TOK_SELEXPR '|') (TOK_SELEXPR (TOK_FUNCTION rtrim (. (TOK_TABLE_OR_COL dest1) c1))) (TOK_SELEXPR '|') (TOK_SELEXPR (TOK_FUNCTION ltrim (. (TOK_TABLE_OR_COL dest1) c1))) (TOK_SELEXPR '|'))))
@@ -38,4 +44,7 @@
limit: -1
+query: SELECT '|', trim(dest1.c1), '|', rtrim(dest1.c1), '|', ltrim(dest1.c1), '|' FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/143850894/869136508.10000
| abc | abc | abc |
Index: ql/src/test/results/clientpositive/input4_cb_delim.q.out
===================================================================
--- ql/src/test/results/clientpositive/input4_cb_delim.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input4_cb_delim.q.out (working copy)
@@ -1,3 +1,8 @@
+query: CREATE TABLE INPUT4_CB(KEY STRING, VALUE STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002' LINES TERMINATED BY '\012' STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB
+query: SELECT INPUT4_CB.VALUE, INPUT4_CB.KEY FROM INPUT4_CB
+Input: default/input4_cb
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/45272155/119966499.10000
val_238 238
val_86 86
val_311 311
@@ -498,3 +503,4 @@
val_400 400
val_200 200
val_97 97
+query: DROP TABLE INPUT4_CB
Index: ql/src/test/results/clientpositive/join13.q.out
===================================================================
--- ql/src/test/results/clientpositive/join13.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join13.q.out (working copy)
@@ -1,3 +1,13 @@
+query: EXPLAIN
+SELECT src1.c1, src2.c4
+FROM
+(SELECT src.key as c1, src.value as c2 from src) src1
+JOIN
+(SELECT src.key as c3, src.value as c4 from src) src2
+ON src1.c1 = src2.c3 AND src1.c1 < 100
+JOIN
+(SELECT src.key as c5, src.value as c6 from src) src3
+ON src1.c1 + src2.c3 = src3.c5 AND src3.c5 < 200
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c2)))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)))) src2) (AND (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)) (< (. (TOK_TABLE_OR_COL src1) c1) 100))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c5) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c6)))) src3) (AND (= (+ (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)) (. (TOK_TABLE_OR_COL src3) c5)) (< (. (TOK_TABLE_OR_COL src3) c5) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) c4)))))
@@ -143,6 +153,17 @@
limit: -1
+query: SELECT src1.c1, src2.c4
+FROM
+(SELECT src.key as c1, src.value as c2 from src) src1
+JOIN
+(SELECT src.key as c3, src.value as c4 from src) src2
+ON src1.c1 = src2.c3 AND src1.c1 < 100
+JOIN
+(SELECT src.key as c5, src.value as c6 from src) src3
+ON src1.c1 + src2.c3 = src3.c5 AND src3.c5 < 200
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/419446929/1272003304.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/ppd_gby_join.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_gby_join.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_gby_join.q.out (working copy)
@@ -1,3 +1,12 @@
+query: EXPLAIN
+SELECT src1.c1, count(1)
+FROM
+(SELECT src.key AS c1, src.value AS c2 from src where src.key > '1' ) src1
+JOIN
+(SELECT src.key AS c3, src.value AS c4 from src where src.key > '2' ) src2
+ON src1.c1 = src2.c3 AND src1.c1 < '400'
+WHERE src1.c1 > '20' AND (src1.c2 < 'val_50' OR src1.c1 > '2') AND (src2.c3 > '50' OR src1.c1 < '50') AND (src2.c3 <> '4')
+GROUP BY src1.c1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c2)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) '1')))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) '2')))) src2) (AND (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)) (< (. (TOK_TABLE_OR_COL src1) c1) '400')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL src1) c1) '20') (OR (< (. (TOK_TABLE_OR_COL src1) c2) 'val_50') (> (. (TOK_TABLE_OR_COL src1) c1) '2'))) (OR (> (. (TOK_TABLE_OR_COL src2) c3) '50') (< (. (TOK_TABLE_OR_COL src1) c1) '50'))) (<> (. (TOK_TABLE_OR_COL src2) c3) '4'))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src1) c1))))
@@ -101,7 +110,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/409060007/303244387.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/649258693/64817789.10002
Reduce Output Operator
key expressions:
expr: 0
Index: ql/src/test/results/clientpositive/groupby7_map_skew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby7_map_skew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby7_map_skew.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))))
@@ -66,7 +72,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/406736263/88952059.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1783138161/343279396.10004
Reduce Output Operator
key expressions:
expr: 0
@@ -127,7 +133,7 @@
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/406736263/88952059.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1783138161/343279396.10005
Reduce Output Operator
key expressions:
expr: 0
@@ -159,7 +165,7 @@
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/406736263/88952059.10004
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1783138161/343279396.10006
Reduce Output Operator
key expressions:
expr: 0
@@ -202,6 +208,15 @@
name: dest2
+query: FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+query: SELECT DEST1.* FROM DEST1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/304069265/587781436.10000
0 0.0
10 10.0
100 200.0
@@ -511,6 +526,9 @@
96 96.0
97 194.0
98 196.0
+query: SELECT DEST2.* FROM DEST2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1656610083/39812005.10000
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/input14.q.out
===================================================================
--- ql/src/test/results/clientpositive/input14.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input14.q.out (working copy)
@@ -1,3 +1,12 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value)
+ USING '/bin/cat' AS (tkey, tvalue)
+ CLUSTER BY tkey
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100))))
@@ -73,6 +82,18 @@
name: dest1
+query: FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value)
+ USING '/bin/cat' AS (tkey, tvalue)
+ CLUSTER BY tkey
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/815600455/1462527055.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/udf6.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf6.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/udf6.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT ' abc ' WHERE src.key = 86
+Input: default/src
+Output: default/dest1
+query: EXPLAIN
+SELECT IF(TRUE, 1, 2) FROM dest1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION IF TRUE 1 2)))))
@@ -27,7 +33,12 @@
limit: -1
+query: SELECT IF(TRUE, 1, 2) FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/238330577/1184838062.10000
1
+query: EXPLAIN
+SELECT IF(TRUE, 1, 2), IF(FALSE, 1, 2), IF(NULL, 1, 2), IF(TRUE, "a", "b"), IF(TRUE, 0.1, 0.2), IF(FALSE, CAST(1 AS BIGINT), 2), IF(FALSE, CAST(127 AS TINYINT), 128), IF(FALSE, CAST(127 AS TINYINT), CAST(128 AS SMALLINT)) FROM dest1
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION IF TRUE 1 2)) (TOK_SELEXPR (TOK_FUNCTION IF FALSE 1 2)) (TOK_SELEXPR (TOK_FUNCTION IF TOK_NULL 1 2)) (TOK_SELEXPR (TOK_FUNCTION IF TRUE "a" "b")) (TOK_SELEXPR (TOK_FUNCTION IF TRUE 0.1 0.2)) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_BIGINT 1) 2)) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_TINYINT 127) 128)) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_TINYINT 127) (TOK_FUNCTION TOK_SMALLINT 128))))))
@@ -71,4 +82,7 @@
limit: -1
+query: SELECT IF(TRUE, 1, 2), IF(FALSE, 1, 2), IF(NULL, 1, 2), IF(TRUE, "a", "b"), IF(TRUE, 0.1, 0.2), IF(FALSE, CAST(1 AS BIGINT), 2), IF(FALSE, CAST(127 AS TINYINT), 128), IF(FALSE, CAST(127 AS TINYINT), CAST(128 AS SMALLINT)) FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/174418965/137845021.10000
1 2 2 a 0.1 2 128 128
Index: ql/src/test/results/clientpositive/groupby8_noskew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby8_noskew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby8_noskew.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE
+query: CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTIONDI COUNT (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB DEST2)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL SRC) key)) (TOK_SELEXPR (TOK_FUNCTIONDI COUNT (TOK_FUNCTION SUBSTR (. (TOK_TABLE_OR_COL SRC) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL SRC) key))))
@@ -78,7 +84,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/1800149890/170127499.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/280831989/373656.10004
Reduce Output Operator
key expressions:
expr: key
@@ -120,6 +126,15 @@
name: dest2
+query: FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
+Input: default/src
+Output: default/dest1
+Output: default/dest2
+query: SELECT DEST1.* FROM DEST1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/14233066/985088079.10000
0 1
10 1
100 1
@@ -429,6 +444,9 @@
96 1
97 1
98 1
+query: SELECT DEST2.* FROM DEST2
+Input: default/dest2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1779608708/316437586.10000
0 1
10 1
100 1
Index: ql/src/test/results/clientpositive/input_part1.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_part1.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING) STORED AS TEXTFILE
+query: EXPLAIN EXTENDED
+FROM srcpart
+INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12'
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF srcpart)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) hr)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) ds))) (TOK_WHERE (and (and (< (. (TOK_TABLE_OR_COL srcpart) key) 100) (= (. (TOK_TABLE_OR_COL srcpart) ds) '2008-04-08')) (= (. (TOK_TABLE_OR_COL srcpart) hr) '12')))))
@@ -37,7 +41,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/1015869899/511155619.10000.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/193974541/25724604.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -51,14 +55,14 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
Needs Tagging: false
Path -> Alias:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Path -> Partition:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=12/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
partition values:
ds 2008-04-08
@@ -77,7 +81,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
@@ -85,7 +89,7 @@
Move Operator
tables:
replace: true
- source: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/1015869899/511155619.10000.insclause-0
+ source: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/193974541/25724604.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -99,11 +103,19 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
+ tmp directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/193974541/25724604.10001
+query: FROM srcpart
+INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12'
+Input: default/srcpart/ds=2008-04-08/hr=12
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/405215409/117459934.10000
86 val_86 12 2008-04-08
27 val_27 12 2008-04-08
98 val_98 12 2008-04-08
Index: ql/src/test/results/clientpositive/join17.q.out
===================================================================
--- ql/src/test/results/clientpositive/join17.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join17.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(key1 INT, value1 STRING, key2 INT, value2 STRING) STORED AS TEXTFILE
+query: EXPLAIN EXTENDED
+FROM src src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest1 SELECT src1.*, src2.*
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src src1) (TOK_TABREF src src2) (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src1)) (TOK_SELEXPR (TOK_ALLCOLREF src2)))))
@@ -41,9 +45,9 @@
type: string
Needs Tagging: true
Path -> Alias:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
Path -> Partition:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -58,7 +62,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/src
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: src
Reduce Operator Tree:
@@ -91,7 +95,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/1037224367/210848515.10000.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/451051116/633047855.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -105,7 +109,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
@@ -113,7 +117,7 @@
Move Operator
tables:
replace: true
- source: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/1037224367/210848515.10000.insclause-0
+ source: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/451051116/633047855.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -127,11 +131,19 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
+ tmp directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/451051116/633047855.10001
+query: FROM src src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest1 SELECT src1.*, src2.*
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/13176457/983700628.10000
0 val_0 0 val_0
0 val_0 0 val_0
0 val_0 0 val_0
Index: ql/src/test/results/clientpositive/input18.q.out
===================================================================
--- ql/src/test/results/clientpositive/input18.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input18.q.out (working copy)
@@ -1,3 +1,12 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+ USING '/bin/cat'
+ CLUSTER BY key
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value) (+ 1 2) (+ 3 4)) '/bin/cat'))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) key)) (TOK_SELEXPR (TOK_FUNCTION regexp_replace (. (TOK_TABLE_OR_COL tmap) value) '\t' '+'))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) key) 100))))
@@ -77,6 +86,18 @@
name: dest1
+query: FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+ USING '/bin/cat'
+ CLUSTER BY key
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/543730859/7367833.10000
0 val_0+3+7
0 val_0+3+7
0 val_0+3+7
Index: ql/src/test/results/clientpositive/groupby2.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby2.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby2.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest_g2(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest_g2)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))))) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))))
@@ -44,7 +48,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/523911021/121514428.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/35355459/37601301.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -104,6 +108,13 @@
name: dest_g2
+query: FROM src
+INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
+Input: default/src
+Output: default/dest_g2
+query: SELECT dest_g2.* FROM dest_g2
+Input: default/dest_g2
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/214974209/1305159333.10000
0 1 00.0
1 71 116414.0
2 69 225571.0
Index: ql/src/test/results/clientpositive/input_part5.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part5.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input_part5.q.out (working copy)
@@ -1,3 +1,8 @@
+query: drop table tmptable
+query: create table tmptable(key string, value string, hr string, ds string)
+query: EXPLAIN
+insert overwrite table tmptable
+SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08' and x.key < 100
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART x)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB tmptable)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL x) ds) '2008-04-08') (< (. (TOK_TABLE_OR_COL x) key) 100)))))
@@ -44,6 +49,14 @@
name: tmptable
+query: insert overwrite table tmptable
+SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08' and x.key < 100
+Input: default/srcpart/ds=2008-04-08/hr=11
+Input: default/srcpart/ds=2008-04-08/hr=12
+Output: default/tmptable
+query: select * from tmptable x sort by x.key
+Input: default/tmptable
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/137438172/765791337.10000
0 val_0 2008-04-08 11
0 val_0 2008-04-08 11
0 val_0 2008-04-08 11
@@ -212,3 +225,4 @@
98 val_98 2008-04-08 11
98 val_98 2008-04-08 12
98 val_98 2008-04-08 12
+query: drop table tmptable
Index: ql/src/test/results/clientpositive/nullgroup3.q.out
===================================================================
--- ql/src/test/results/clientpositive/nullgroup3.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/nullgroup3.q.out (working copy)
@@ -1,3 +1,9 @@
+query: DROP TABLE tstparttbl
+query: CREATE TABLE tstparttbl(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09')
+query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08')
+query: explain
+select count(1) from tstparttbl
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF tstparttbl)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)))))
@@ -42,7 +48,17 @@
limit: -1
+query: select count(1) from tstparttbl
+Input: default/tstparttbl/ds=2008-04-09
+Input: default/tstparttbl/ds=2008-04-08
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1221493083/197971283.10000
500
+query: DROP TABLE tstparttbl2
+query: CREATE TABLE tstparttbl2(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09')
+query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08')
+query: explain
+select count(1) from tstparttbl2
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF tstparttbl2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)))))
@@ -87,6 +103,16 @@
limit: -1
+query: select count(1) from tstparttbl2
+Input: default/tstparttbl2/ds=2008-04-09
+Input: default/tstparttbl2/ds=2008-04-08
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/200536084/27806968.10000
+query: DROP TABLE tstparttbl
+query: CREATE TABLE tstparttbl(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09')
+query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08')
+query: explain
+select count(1) from tstparttbl
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF tstparttbl)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)))))
@@ -131,7 +157,17 @@
limit: -1
+query: select count(1) from tstparttbl
+Input: default/tstparttbl/ds=2008-04-09
+Input: default/tstparttbl/ds=2008-04-08
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1474931838/738960864.10000
500
+query: DROP TABLE tstparttbl2
+query: CREATE TABLE tstparttbl2(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09')
+query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08')
+query: explain
+select count(1) from tstparttbl2
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF tstparttbl2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)))))
@@ -176,3 +212,7 @@
limit: -1
+query: select count(1) from tstparttbl2
+Input: default/tstparttbl2/ds=2008-04-09
+Input: default/tstparttbl2/ds=2008-04-08
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/48371033/920895567.10000
Index: ql/src/test/results/clientpositive/groupby6.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby6.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby6.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,5,1)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECTDI (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5 1)))))
@@ -41,7 +45,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/696030063/376464961.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/605013417/275946039.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -81,6 +85,13 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,5,1)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/26109249/3810410.10000
0
1
2
Index: ql/src/test/results/clientpositive/union5.q.out
===================================================================
--- ql/src/test/results/clientpositive/union5.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union5.q.out (working copy)
@@ -1,3 +1,7 @@
+query: explain
+ select unionsrc.key, count(1) FROM (select 'tst1' as key, count(1) as value from src s1
+ UNION ALL
+ select 'tst2' as key, count(1) as value from src s2) unionsrc group by unionsrc.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst1' key) (TOK_SELEXPR (TOK_FUNCTION count 1) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'tst2' key) (TOK_SELEXPR (TOK_FUNCTION count 1) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL unionsrc) key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL unionsrc) key))))
@@ -46,7 +50,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/1122480073/45739411.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1649113926/399998151.10002
Union
Group By Operator
aggregations:
@@ -67,7 +71,7 @@
value expressions:
expr: 1
type: bigint
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/1122480073/45739411.10003
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/1649113926/399998151.10003
Union
Group By Operator
aggregations:
@@ -148,5 +152,10 @@
limit: -1
+query: select unionsrc.key, count(1) FROM (select 'tst1' as key, count(1) as value from src s1
+ UNION ALL
+ select 'tst2' as key, count(1) as value from src s2) unionsrc group by unionsrc.key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1203907479/249233034.10000
tst1 1
tst2 1
Index: ql/src/test/results/clientpositive/input2_limit.q.out
===================================================================
--- ql/src/test/results/clientpositive/input2_limit.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input2_limit.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT x.* FROM SRC x WHERE x.key < 300 LIMIT 5
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL x) key) 300)) (TOK_LIMIT 5)))
@@ -33,6 +35,9 @@
limit: 5
+query: SELECT x.* FROM SRC x WHERE x.key < 300 LIMIT 5
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/472205870/117610052.10000
238 val_238
86 val_86
27 val_27
Index: ql/src/test/results/clientpositive/create_insert_outputformat.q.out
===================================================================
--- ql/src/test/results/clientpositive/create_insert_outputformat.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/create_insert_outputformat.q.out (working copy)
@@ -1,6 +1,36 @@
+query: DROP TABLE table_test_output_format
+query: CREATE TABLE table_test_output_format(key INT, value STRING) STORED AS
+ INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
+ OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
+query: FROM src
+INSERT OVERWRITE TABLE table_test_output_format SELECT src.key, src.value LIMIT 10
+Input: default/src
+Output: default/table_test_output_format
+query: describe table_test_output_format
key int
value string
+query: DROP TABLE table_test_output_format
+query: DROP TABLE table_test_output_format_sequencefile
+query: CREATE TABLE table_test_output_format_sequencefile(key INT, value STRING) STORED AS
+ INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
+ OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat'
+query: FROM src
+INSERT OVERWRITE TABLE table_test_output_format_sequencefile SELECT src.key, src.value LIMIT 10
+Input: default/src
+Output: default/table_test_output_format_sequencefile
+query: describe table_test_output_format_sequencefile
key int
value string
+query: DROP TABLE table_test_output_format_sequencefile
+query: DROP TABLE table_test_output_format_hivesequencefile
+query: CREATE TABLE table_test_output_format_hivesequencefile(key INT, value STRING) STORED AS
+ INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
+ OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
+query: FROM src
+INSERT OVERWRITE TABLE table_test_output_format_hivesequencefile SELECT src.key, src.value LIMIT 10
+Input: default/src
+Output: default/table_test_output_format_hivesequencefile
+query: describe table_test_output_format_hivesequencefile
key int
value string
+query: DROP TABLE table_test_output_format_hivesequencefile
Index: ql/src/test/results/clientpositive/union9.q.out
===================================================================
--- ql/src/test/results/clientpositive/union9.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/union9.q.out (working copy)
@@ -1,3 +1,7 @@
+query: explain
+ select count(1) FROM (select s1.key as key, s1.value as value from src s1 UNION ALL
+ select s2.key as key, s2.value as value from src s2 UNION ALL
+ select s3.key as key, s3.value as value from src s3) unionsrc
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src s1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s1) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s1) value) value)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) value) value))))) (TOK_QUERY (TOK_FROM (TOK_TABREF src s3)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s3) key) key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s3) value) value))))) unionsrc)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)))))
@@ -84,4 +88,9 @@
limit: -1
+query: select count(1) FROM (select s1.key as key, s1.value as value from src s1 UNION ALL
+ select s2.key as key, s2.value as value from src s2 UNION ALL
+ select s3.key as key, s3.value as value from src s3) unionsrc
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/371390513/112754866.10000
1500
Index: ql/src/test/results/clientpositive/input4_limit.q.out
===================================================================
--- ql/src/test/results/clientpositive/input4_limit.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/input4_limit.q.out (working copy)
@@ -1,3 +1,5 @@
+query: explain
+select * from src sort by key limit 10
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key))) (TOK_LIMIT 10)))
@@ -42,7 +44,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/532000507/301674559.10002
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/299272091/500299840.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -69,6 +71,9 @@
limit: 10
+query: select * from src sort by key limit 10
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/132764629/1854197962.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/sort.q.out
===================================================================
--- ql/src/test/results/clientpositive/sort.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/sort.q.out (working copy)
@@ -1,3 +1,5 @@
+query: EXPLAIN
+SELECT x.* FROM SRC x SORT BY key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))))
@@ -41,6 +43,9 @@
limit: -1
+query: SELECT x.* FROM SRC x SORT BY key
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/875507913/8639603.10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/mapreduce4.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapreduce4.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/mapreduce4.q.out (working copy)
@@ -1,3 +1,11 @@
+query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+DISTRIBUTE BY tvalue, tkey
+SORT BY ten DESC, one ASC
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey)) (TOK_SORTBY (TOK_TABSORTCOLNAMEDESC (TOK_TABLE_OR_COL ten)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL one)))))
@@ -79,6 +87,17 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+DISTRIBUTE BY tvalue, tkey
+SORT BY ten DESC, one ASC
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/608149359/217174258.10000
90 9 0 val_90
90 9 0 val_90
90 9 0 val_90
Index: ql/src/test/results/clientpositive/mapreduce8.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapreduce8.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/mapreduce8.q.out (working copy)
@@ -1,3 +1,11 @@
+query: CREATE TABLE dest1(k STRING, v STRING, key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.*, src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (k, v, tkey, ten, one, tvalue)
+DISTRIBUTE BY rand(3)
+SORT BY tvalue, tkey
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_ALLCOLREF src) (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST k v tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_FUNCTION rand 3)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tvalue)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tkey)))))
@@ -89,6 +97,17 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.*, src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (k, v, tkey, ten, one, tvalue)
+DISTRIBUTE BY rand(3)
+SORT BY tvalue, tkey
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/903115095/320438958.10000
0 val_0 0 0 0 val_0
0 val_0 0 0 0 val_0
0 val_0 0 0 0 val_0
Index: ql/src/test/results/clientpositive/groupby5_noskew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby5_noskew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby5_noskew.q.out (working copy)
@@ -1,3 +1,9 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+INSERT OVERWRITE TABLE dest1
+SELECT src.key, sum(substr(src.value,5))
+FROM src
+GROUP BY src.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) key))))
@@ -62,6 +68,15 @@
name: dest1
+query: INSERT OVERWRITE TABLE dest1
+SELECT src.key, sum(substr(src.value,5))
+FROM src
+GROUP BY src.key
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1344799541/10680715.10000
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/sample1.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample1.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/sample1.q.out (working copy)
@@ -1,3 +1,8 @@
+query: CREATE TABLE dest1(key INT, value STRING, dt STRING, hr STRING) STORED AS TEXTFILE
+query: EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1 ON rand()) s
+WHERE s.ds='2008-04-08' and s.hr='11'
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF srcpart (TOK_TABLESAMPLE 1 1 (TOK_FUNCTION rand)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL s) ds) '2008-04-08') (= (. (TOK_TABLE_OR_COL s) hr) '11')))))
@@ -41,7 +46,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/505940550/970696290.10000.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/139440119/111511740.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -55,14 +60,14 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
Needs Tagging: false
Path -> Alias:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=11/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Path -> Partition:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart/hr=11/ds=2008-04-08
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
partition values:
ds 2008-04-08
@@ -81,7 +86,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcpart
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
@@ -89,7 +94,7 @@
Move Operator
tables:
replace: true
- source: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/505940550/970696290.10000.insclause-0
+ source: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/139440119/111511740.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -103,11 +108,20 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
+ tmp directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/139440119/111511740.10001
+query: INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1 ON rand()) s
+WHERE s.ds='2008-04-08' and s.hr='11'
+Input: default/srcpart/ds=2008-04-08/hr=11
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/52723391/39379816.10000
238 val_238 2008-04-08 11
86 val_86 2008-04-08 11
311 val_311 2008-04-08 11
Index: ql/src/test/results/clientpositive/groupby6_map_skew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby6_map_skew.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby6_map_skew.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,5,1)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECTDI (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5 1)))))
@@ -46,7 +50,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/728662893/342590781.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/19945520/12512881.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -86,6 +90,13 @@
name: dest1
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,5,1)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/2643090/1744448740.10000
0
1
2
Index: ql/src/test/results/clientpositive/ppd_outer_join3.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_outer_join3.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/ppd_outer_join3.q.out (working copy)
@@ -1,3 +1,11 @@
+query: EXPLAIN
+ FROM
+ src a
+ FULL OUTER JOIN
+ src b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25'
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_FULLOUTERJOIN (TOK_TABREF src a) (TOK_TABREF src b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) '10') (< (. (TOK_TABLE_OR_COL a) key) '20')) (> (. (TOK_TABLE_OR_COL b) key) '15')) (< (. (TOK_TABLE_OR_COL b) key) '25')))))
@@ -72,6 +80,15 @@
limit: -1
+query: FROM
+ src a
+ FULL OUTER JOIN
+ src b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25'
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/68623657/698316651.10000
150 val_150 150 val_150
152 val_152 152 val_152
152 val_152 152 val_152
Index: ql/src/test/results/clientpositive/join22.q.out
===================================================================
--- ql/src/test/results/clientpositive/join22.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/join22.q.out (working copy)
@@ -1,3 +1,5 @@
+query: explain
+SELECT src5.src1_value FROM (SELECT src3.*, src4.value as src4_value, src4.key as src4_key FROM src src4 JOIN (SELECT src2.*, src1.key as src1_key, src1.value as src1_value FROM src src1 JOIN src src2 ON src1.key = src2.key) src3 ON src3.src1_key = src4.key) src5
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src src4) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src src1) (TOK_TABREF src src2) (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) src1_key) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) src1_value)))) src3) (= (. (TOK_TABLE_OR_COL src3) src1_key) (. (TOK_TABLE_OR_COL src4) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src3)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src4) value) src4_value) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src4) key) src4_key)))) src5)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src5) src1_value)))))
Index: ql/src/test/results/clientpositive/groupby1_map.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby1_map.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby1_map.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) key))))
@@ -69,6 +72,12 @@
name: dest1
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/3018978/154486697.10000
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/groupby1_limit.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby1_limit.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby1_limit.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key LIMIT 5
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) key)) (TOK_LIMIT 5)))
@@ -56,7 +59,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /data/users/zshao/tools/416-trunk-apache-hive/build/ql/tmp/889679376/188373553.10001
+ /data/users/athusoo/commits/hive_trunk_ws8/build/ql/tmp/3130862/578287351.10002
Reduce Output Operator
sort order:
tag: -1
@@ -94,6 +97,12 @@
name: dest1
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key LIMIT 5
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/212729573/95177349.10000
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/sample5.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample5.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/sample5.q.out (working copy)
@@ -1,3 +1,7 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 5 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s)))))
@@ -29,7 +33,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/127373595/158315357.10000.insclause-0
+ directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1095779156/1227078175.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -43,14 +47,14 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
Needs Tagging: false
Path -> Alias:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcbucket
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket
Path -> Partition:
- file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcbucket
+ file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -66,7 +70,7 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/srcbucket
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/srcbucket
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket
@@ -74,7 +78,7 @@
Move Operator
tables:
replace: true
- source: /data/users/zshao/tools/416-trunk-apache-hive/ql/../build/ql/tmp/127373595/158315357.10000.insclause-0
+ source: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1095779156/1227078175.10000.insclause-0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -88,11 +92,19 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/zshao/tools/416-trunk-apache-hive/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/dest1
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
+ tmp directory: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/1095779156/1227078175.10001
+query: INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s
+Input: default/srcbucket
+Output: default/dest1
+query: SELECT dest1.* FROM dest1 SORT BY key
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/665499523/380611837.10000
2 val_2
2 val_3
18 val_18
Index: ql/src/test/results/clientpositive/inputddl4.q.out
===================================================================
--- ql/src/test/results/clientpositive/inputddl4.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/inputddl4.q.out (working copy)
@@ -1,3 +1,12 @@
+query: DROP TABLE INPUTDDL4
+query: CREATE TABLE INPUTDDL4(viewTime STRING, userid INT,
+ page_url STRING, referrer_url STRING,
+ friends ARRAY&lt;STRING&gt;, properties MAP&lt;STRING, STRING&gt;,
+ ip STRING COMMENT 'IP Address of the User')
+ COMMENT 'This is the page view table'
+ PARTITIONED BY(ds DATETIME, country STRING)
+ CLUSTERED BY(userid) SORTED BY(viewTime) INTO 32 BUCKETS
+query: DESCRIBE INPUTDDL4
viewtime string
userid int
page_url string
@@ -7,6 +16,7 @@
ip string IP Address of the User
ds datetime
country string
+query: DESCRIBE EXTENDED INPUTDDL4
viewtime string
userid int
page_url string
@@ -17,4 +27,5 @@
ds datetime
country string
-Detailed Table Information Table(tableName:inputddl4,dbName:default,owner:rmurthy,createTime:1238030305,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:viewtime,type:string,comment:null), FieldSchema(name:userid,type:int,comment:null), FieldSchema(name:page_url,type:string,comment:null), FieldSchema(name:referrer_url,type:string,comment:null), FieldSchema(name:friends,type:array&lt;string&gt;,comment:null), FieldSchema(name:properties,type:map&lt;string,string&gt;,comment:null), FieldSchema(name:ip,type:string,comment:IP Address of the User)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/inputddl4,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:32,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[userid],sortCols:[Order(col:viewtime,order:1)],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null), FieldSchema(name:country,type:string,comment:null)],parameters:{comment=This is the page view table})
+Detailed Table Information Table(tableName:inputddl4,dbName:default,owner:athusoo,createTime:1241278343,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:viewtime,type:string,comment:null), FieldSchema(name:userid,type:int,comment:null), FieldSchema(name:page_url,type:string,comment:null), FieldSchema(name:referrer_url,type:string,comment:null), FieldSchema(name:friends,type:array&lt;string&gt;,comment:null), FieldSchema(name:properties,type:map&lt;string,string&gt;,comment:null), FieldSchema(name:ip,type:string,comment:IP Address of the User)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/inputddl4,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:32,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[userid],sortCols:[Order(col:viewtime,order:1)],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null), FieldSchema(name:country,type:string,comment:null)],parameters:{comment=This is the page view table})
+query: DROP TABLE INPUTDDL4
Index: ql/src/test/results/clientpositive/groupby5_map.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby5_map.q.out (revision 770442)
+++ ql/src/test/results/clientpositive/groupby5_map.q.out (working copy)
@@ -1,3 +1,6 @@
+query: CREATE TABLE dest1(key INT) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT sum(src.key)
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (. (TOK_TABLE_OR_COL src) key))))))
@@ -57,4 +60,10 @@
name: dest1
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT sum(src.key)
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/168289191/1427236440.10000
130091
Index: ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java (revision 0)
+++ ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java (revision 0)
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.hooks;
+
+import java.util.Set;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+
+/**
+ * Implementation of a pre execute hook that simply prints out its
+ * parameters to standard output.
+ */
+public class PreExecutePrinter implements PreExecute {
+
+ @Override
+ public void run(SessionState sess, Set&lt;ReadEntity&gt; inputs,
+ Set&lt;WriteEntity&gt; outputs, UserGroupInformation ugi)
+ throws Exception {
+
+ LogHelper console = SessionState.getConsole();
+
+ if (console == null)
+ return;
+
+ if (sess != null)
+ console.printError("query: " + sess.getCmd().trim());
+
+ for(ReadEntity re: inputs) {
+ console.printError("Input: " + re.toString());
+ }
+ for(WriteEntity we: outputs) {
+ console.printError("Output: " + we.toString());
+ }
+ }
+
+}
Index: ql/src/test/queries/clientnegative/bad_exec_hooks.q
===================================================================
--- ql/src/test/queries/clientnegative/bad_exec_hooks.q (revision 0)
+++ ql/src/test/queries/clientnegative/bad_exec_hooks.q (revision 0)
@@ -0,0 +1,6 @@
+set hive.exec.pre.hooks="org.this.is.a.bad.class";
+
+EXPLAIN
+SELECT x.* FROM SRC x LIMIT 20;
+
+SELECT x.* FROM SRC x LIMIT 20;
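[Note: the negative test above depends on the class names listed in hive.exec.pre.hooks being resolved by reflection before execution. The driver-side wiring is not part of the hunks shown here, so the following is only a minimal sketch of how such resolution could look, written against the PreExecute interface introduced later in this patch; the class name PreExecHookRunner and the method runHooks are illustrative, not the patch's actual code.]

package org.apache.hadoop.hive.ql.hooks;

import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.security.UserGroupInformation;

/**
 * Illustrative sketch only: resolves and invokes the pre execute hooks
 * named in hive.exec.pre.hooks as a comma separated class list.
 */
public class PreExecHookRunner {

  public static void runHooks(Configuration conf, SessionState sess,
      Set<ReadEntity> inputs, Set<WriteEntity> outputs,
      UserGroupInformation ugi) throws Exception {

    // Comma-separated list of fully qualified PreExecute implementations.
    String hookNames = conf.get("hive.exec.pre.hooks", "").trim();
    if (hookNames.isEmpty()) {
      return;
    }

    for (String name : hookNames.split(",")) {
      // A class that cannot be resolved here is exactly what
      // bad_exec_hooks.q exercises with "org.this.is.a.bad.class".
      Class<?> cls = Class.forName(name.trim());
      PreExecute hook = (PreExecute) cls.newInstance();
      hook.run(sess, inputs, outputs, ugi);
    }
  }
}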
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java (revision 770442)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java (working copy)
@@ -22,11 +22,14 @@
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
+import java.util.Set;
import java.io.Serializable;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.UnionOperator;
import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.plan.tableDesc;
@@ -49,6 +52,8 @@
* @param currTask the current task
* @param currTopOp the current top operator being traversed
* @param currAliasId the current alias for the top operator
+ * @param inputs the list of read entities
+ * @param outputs the list of write entities
*/
public GenMapRedCtx (Task<? extends Serializable> currTask,
Operator<? extends Serializable> currTopOp,
@@ -133,8 +138,19 @@
private UnionOperator currUnionOp;
private String currAliasId;
private List<Operator<? extends Serializable>> rootOps;
-
+
/**
+ * Set of read entities. This list is generated by the walker and is
+ * passed to the hooks.
+ */
+ private Set<ReadEntity> inputs;
+ /**
+ * Set of write entities. This list is generated by the walker and is
+ * passed to the hooks.
+ */
+ private Set<WriteEntity> outputs;
+
+ /**
* @param opTaskMap reducer to task mapping
* @param seenOps operator already visited
* @param parseCtx current parse context
@@ -142,6 +158,8 @@
* @param mvTask the final move task
* @param scratchDir directory for temp destinations
* @param mapCurrCtx operator to task mappings
+ * @param inputs the set of input tables/partitions generated by the walk
+ * @param outputs the set of destinations generated by the walk
*/
public GenMRProcContext (
HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap,
@@ -150,7 +168,9 @@
Task<? extends Serializable> mvTask,
List<Task<? extends Serializable>> rootTasks,
String scratchDir, int randomid, int pathid,
- Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx)
+ Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx,
+ Set<ReadEntity> inputs,
+ Set<WriteEntity> outputs)
{
this.opTaskMap = opTaskMap;
@@ -162,6 +182,8 @@
this.randomid = randomid;
this.pathid = pathid;
this.mapCurrCtx = mapCurrCtx;
+ this.inputs = inputs;
+ this.outputs = outputs;
currTask = null;
currTopOp = null;
currUnionOp = null;
@@ -371,4 +393,18 @@
public void setUnionTask(UnionOperator op, GenMRUnionCtx uTask) {
unionTaskMap.put(op, uTask);
}
+
+ /**
+ * Get the input set.
+ */
+ public Set<ReadEntity> getInputs() {
+ return inputs;
+ }
+
+ /**
+ * Get the output set.
+ */
+ public Set<WriteEntity> getOutputs() {
+ return outputs;
+ }
}
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (revision 770442)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (working copy)
@@ -49,6 +49,8 @@
import org.apache.hadoop.hive.ql.metadata.*;
import org.apache.hadoop.hive.ql.parse.*;
import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMRUnionCtx;
@@ -237,6 +239,7 @@
mapredWork plan, boolean local, GenMRProcContext opProcCtx)
throws SemanticException {
ParseContext parseCtx = opProcCtx.getParseCtx();
+ Set<ReadEntity> inputs = opProcCtx.getInputs();
if (!local) {
// Generate the map work for this alias_id
@@ -256,6 +259,11 @@
SamplePruner samplePruner = parseCtx.getAliasToSamplePruner().get(alias_id);
for (Partition part : parts) {
+ if (part.getTable().isPartitioned())
+ inputs.add(new ReadEntity(part));
+ else
+ inputs.add(new ReadEntity(part.getTable()));
+
// Later the properties have to come from the partition as opposed
// to from the table in order to support versioning.
Path paths[];
Index: ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java (revision 0)
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.hooks;
+
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import java.net.URI;
+
+/**
+ * This class encapsulates an object that is being written to
+ * by the query. This object may be a table, partition, dfs
+ * directory or a local directory.
+ */
+public class WriteEntity {
+
+ /**
+ * The type of the write entity.
+ */
+ public static enum Type {TABLE, PARTITION, DFS_DIR, LOCAL_DIR};
+
+ /**
+ * The type.
+ */
+ private Type typ;
+
+ /**
+ * The table. This is null if this is a directory.
+ */
+ private Table t;
+
+ /**
+ * The partition. This is null if this object is not a partition.
+ */
+ private Partition p;
+
+ /**
+ * The directory if this is a directory.
+ */
+ private String d;
+
+ /**
+ * Constructor for a table.
+ *
+ * @param t Table that is written to.
+ */
+ public WriteEntity(Table t) {
+ this.d = null;
+ this.p = null;
+ this.t = t;
+ this.typ = Type.TABLE;
+ }
+
+ /**
+ * Constructor for a partition.
+ *
+ * @param p Partition that is written to.
+ */
+ public WriteEntity(Partition p) {
+ this.d = null;
+ this.p = p;
+ this.t = p.getTable();
+ this.typ = Type.PARTITION;
+ }
+
+ /**
+ * Constructor for a file.
+ *
+ * @param d The name of the directory that is being written to.
+ * @param islocal Flag to decide whether this directory is local or in dfs.
+ */
+ public WriteEntity(String d, boolean islocal) {
+ this.d = d;
+ this.p = null;
+ this.t = null;
+ if (islocal) {
+ this.typ = Type.LOCAL_DIR;
+ }
+ else {
+ this.typ = Type.DFS_DIR;
+ }
+ }
+
+ /**
+ * Get the type of the entity.
+ */
+ public Type getType() {
+ return typ;
+ }
+
+ /**
+ * Get the location of the entity.
+ */
+ public URI getLocation() throws Exception {
+ if (typ == Type.TABLE)
+ return t.getDataLocation();
+
+ if (typ == Type.PARTITION)
+ return p.getDataLocation();
+
+ if (typ == Type.DFS_DIR || typ == Type.LOCAL_DIR)
+ return new URI(d);
+
+ return null;
+ }
+
+ /**
+ * toString function.
+ */
+ public String toString() {
+ switch(typ) {
+ case TABLE:
+ return t.getDbName() + "/" + t.getName();
+ case PARTITION:
+ return t.getDbName() + "/" + t.getName() + "/" + p.getName();
+ default:
+ return d;
+ }
+ }
+
+ /**
+ * Equals function.
+ */
+ @Override
+ public boolean equals(Object o) {
+ if (o == null)
+ return false;
+
+ if (o instanceof WriteEntity) {
+ WriteEntity ore = (WriteEntity)o;
+ return (toString().equalsIgnoreCase(ore.toString()));
+ }
+ else
+ return false;
+ }
+
+ /**
+ * Hashcode function.
+ */
+ @Override
+ public int hashCode() {
+ return toString().hashCode();
+ }
+
+}
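[Note: a small usage sketch, not part of the patch. Because equals() and hashCode() above are derived from toString(), the same destination collected twice during the plan walk collapses to a single entry in a Set; the class and method names below are illustrative.]

import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Table;

public class WriteEntityDedup {
  public static Set<WriteEntity> collect(Table dest, String tmpDir) {
    Set<WriteEntity> outputs = new HashSet<WriteEntity>();
    outputs.add(new WriteEntity(dest));          // table destination
    outputs.add(new WriteEntity(dest));          // duplicate, ignored by the set
    outputs.add(new WriteEntity(tmpDir, false)); // a DFS directory destination
    return outputs;                              // two distinct entries remain
  }
}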
Index: ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java (revision 0)
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.hooks;
+
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import java.util.Map;
+import java.net.URI;
+
+/**
+ * This class encapsulates the information on the partition and
+ * tables that are read by the query.
+ */
+public class ReadEntity {
+
+ /**
+ * The partition. This is null for a non partitioned table.
+ */
+ private Partition p;
+
+ /**
+ * The table.
+ */
+ private Table t;
+
+ /**
+ * Constructor.
+ *
+ * @param t The Table that the query reads from.
+ */
+ public ReadEntity(Table t) {
+ this.t = t;
+ this.p = null;
+ }
+
+ /**
+ * Constructor given a partition.
+ *
+ * @param p The partition that the query reads from.
+ */
+ public ReadEntity(Partition p) {
+ this.t = p.getTable();
+ this.p = p;
+ }
+ /**
+ * Enum that tells what type of read entity this is.
+ */
+ public static enum Type {TABLE, PARTITION};
+
+ /**
+ * Get the type.
+ */
+ public Type getType() {
+ return p == null ? Type.TABLE : Type.PARTITION;
+ }
+
+ /**
+ * Get the parameter map of the Entity.
+ */
+ public Map<String, String> getParameter() {
+ if (p != null) {
+ return p.getTPartition().getParameters();
+ }
+ else {
+ return t.getTTable().getParameters();
+ }
+ }
+
+ /**
+ * Get the location of the entity.
+ */
+ public URI getLocation() {
+ if (p != null) {
+ return p.getDataLocation();
+ }
+ else {
+ return t.getDataLocation();
+ }
+ }
+
+ /**
+ * toString function.
+ */
+ @Override
+ public String toString() {
+ if (p != null) {
+ return p.getTable().getDbName() + "/" + p.getTable().getName() + "/" + p.getName();
+ }
+ else {
+ return t.getDbName() + "/" + t.getName();
+ }
+ }
+
+ /**
+ * Equals function.
+ */
+ @Override
+ public boolean equals(Object o) {
+ if (o == null)
+ return false;
+
+ if (o instanceof ReadEntity) {
+ ReadEntity ore = (ReadEntity)o;
+ return (toString().equalsIgnoreCase(ore.toString()));
+ }
+ else
+ return false;
+ }
+
+ /**
+ * Hashcode function.
+ */
+ @Override
+ public int hashCode() {
+ return toString().hashCode();
+ }
+}
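
Since both ReadEntity and WriteEntity delegate equals() and hashCode() to toString(), entities that refer to the same object collapse to a single element when the semantic analyzer collects them into LinkedHashSet instances later in this patch. A minimal sketch of that behavior, again illustration only (WriteEntity is used because it can be built from a plain directory string, whereas a real Table or Partition needs a metastore connection):

import java.util.LinkedHashSet;
import java.util.Set;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;

public class EntityDedupExample {
  public static void main(String[] args) {
    Set<WriteEntity> outputs = new LinkedHashSet<WriteEntity>();
    // Two entities for the same DFS directory...
    outputs.add(new WriteEntity("/tmp/hive-scratch/10000", false));
    outputs.add(new WriteEntity("/tmp/hive-scratch/10000", false));
    // ...occupy a single slot because equality is defined on toString().
    System.out.println(outputs.size()); // prints 1
  }
}
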
Index: ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java (revision 0)
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.hooks;
+
+import java.util.Set;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+/**
+ * The pre-execute hook interface. A list of such hooks can
+ * be configured to be called after compilation and before
+ * execution.
+ */
+public interface PreExecute {
+
+ /**
+ * The run method that is called just before the query is
+ * executed.
+ *
+ * @param sess The session state.
+ * @param inputs The set of input tables and partitions.
+ * @param outputs The set of output tables, partitions, local and hdfs directories.
+ * @param ugi The user group security information.
+ */
+ public void run(SessionState sess, Set<ReadEntity> inputs,
+ Set<WriteEntity> outputs, UserGroupInformation ugi)
+ throws Exception;
+
+}
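
As a rough sketch of what an implementation of this interface could look like, the hypothetical class below simply logs every input and output entity; it is not part of the patch. A class like this would be named, comma-separated with any other hooks, in the pre-execute hook configuration variable that Driver reads further down (HiveConf.ConfVars.PREEXECHOOKS):

package org.example.hive.hooks;

import java.util.Set;

import org.apache.hadoop.hive.ql.hooks.PreExecute;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.security.UserGroupInformation;

public class LoggingPreExecuteHook implements PreExecute {
  public void run(SessionState sess, Set<ReadEntity> inputs,
      Set<WriteEntity> outputs, UserGroupInformation ugi) throws Exception {
    // Log the tables/partitions the query reads and the
    // tables/partitions/directories it writes.
    SessionState.LogHelper console = SessionState.getConsole();
    for (ReadEntity input : inputs) {
      console.printInfo("PREHOOK input: " + input);
    }
    for (WriteEntity output : outputs) {
      console.printInfo("PREHOOK output: " + output);
    }
  }
}
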
Index: ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (revision 770442)
+++ ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (working copy)
@@ -284,7 +284,7 @@
/**
* initialize or retrieve console object for SessionState
*/
- private static LogHelper getConsole() {
+ public static LogHelper getConsole() {
if(_console == null) {
Log LOG = LogFactory.getLog("SessionState");
_console = new LogHelper(LOG);
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (revision 770442)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (working copy)
@@ -30,7 +30,10 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
public abstract class BaseSemanticAnalyzer {
protected String scratchDir;
@@ -242,6 +245,14 @@
return taskTmpDir;
}
+ public Set<ReadEntity> getInputs() {
+ return new LinkedHashSet<ReadEntity>();
+ }
+
+ public Set<WriteEntity> getOutputs() {
+ return new LinkedHashSet<WriteEntity>();
+ }
+
public static class tableSpec {
public String tableName;
public Table tableHandle;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (revision 770442)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (working copy)
@@ -22,12 +22,12 @@
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.Formatter;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -79,12 +79,9 @@
import org.apache.hadoop.hive.ql.optimizer.Optimizer;
import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
-import org.apache.hadoop.hive.ql.optimizer.GenMRUnion1;
import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink3;
import org.apache.hadoop.hive.ql.plan.*;
-import org.apache.hadoop.hive.ql.udf.UDFOPPositive;
import org.apache.hadoop.hive.ql.exec.*;
-import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.aggregationDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
@@ -115,7 +112,6 @@
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -123,9 +119,11 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.mapred.InputFormat;
-import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+
/**
* Implementation of the semantic analyzer
*/
@@ -143,6 +141,15 @@
private int destTableId;
private UnionProcContext uCtx;
+ /**
+ * ReadEntities that are passed to the hooks.
+ */
+ private Set<ReadEntity> inputs;
+ /**
+ * WriteEntities that are passed to the hooks.
+ */
+ private Set<WriteEntity> outputs;
+
private static class Phase1Ctx {
String dest;
int nextNum;
@@ -161,6 +168,9 @@
opParseCtx = new HashMap<Operator<? extends Serializable>, OpParseContext>();
this.destTableId = 1;
this.uCtx = null;
+
+ inputs = new LinkedHashSet<ReadEntity>();
+ outputs = new LinkedHashSet<WriteEntity>();
}
@@ -2208,6 +2218,7 @@
this.loadTableWork.add(new loadTableDesc(queryTmpdir, getTmpFileName(),
table_desc,
new HashMap<String, String>()));
+ outputs.add(new WriteEntity(dest_tab));
break;
}
case QBMetaData.DEST_PARTITION:
@@ -2221,6 +2232,7 @@
this.destTableId ++;
this.loadTableWork.add(new loadTableDesc(queryTmpdir, getTmpFileName(), table_desc, dest_part.getSpec()));
+ outputs.add(new WriteEntity(dest_part));
break;
}
case QBMetaData.DEST_LOCAL_FILE:
@@ -2247,10 +2259,12 @@
currentTableId = this.destTableId;
this.destTableId ++;
}
+ boolean isDfsDir = (dest_type.intValue() == QBMetaData.DEST_DFS_FILE);
this.loadFileWork.add(new loadFileDesc(queryTmpdir, dest_path,
- (dest_type.intValue() == QBMetaData.DEST_DFS_FILE), cols));
+ isDfsDir, cols));
table_desc = PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.ctrlaCode),
cols);
+ outputs.add(new WriteEntity(dest_path, !isDfsDir));
break;
}
default:
@@ -3412,6 +3426,7 @@
if (!tab.isPartitioned()) {
if (qbParseInfo.getDestToWhereExpr().isEmpty())
fetch = new fetchWork(tab.getPath(), Utilities.getTableDesc(tab), qb.getParseInfo().getOuterQueryLimit());
+ inputs.add(new ReadEntity(tab));
}
else {
if (aliasToPruner.size() == 1) {
@@ -3432,6 +3447,7 @@
Partition part = iterParts.next();
listP.add(part.getPartitionPath());
partP.add(Utilities.getPartitionDesc(part));
+ inputs.add(new ReadEntity(part));
}
fetch = new fetchWork(listP, partP, qb.getParseInfo().getOuterQueryLimit());
}
@@ -3480,7 +3496,8 @@
new HashMap<Operator<? extends Serializable>, Task<? extends Serializable>>(),
new ArrayList<Operator<? extends Serializable>>(),
getParseContext(), mvTask, this.rootTasks, this.scratchDir, this.randomid, this.pathid,
- new HashMap<Operator<? extends Serializable>, GenMapRedCtx>());
+ new HashMap<Operator<? extends Serializable>, GenMapRedCtx>(),
+ inputs, outputs);
// create a walker which walks the tree in a DFS manner while maintaining the operator stack.
// The dispatcher generates the plan from the operator tree
@@ -3751,4 +3768,12 @@
return newParameters;
}
+ @Override
+ public Set<ReadEntity> getInputs() {
+ return inputs;
+ }
+
+ public Set<WriteEntity> getOutputs() {
+ return outputs;
+ }
}
Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java (revision 770442)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java (working copy)
@@ -40,10 +40,13 @@
import org.apache.hadoop.hive.ql.exec.FetchTask;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.hooks.PreExecute;
import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
import org.apache.hadoop.hive.ql.plan.tableDesc;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -209,6 +212,23 @@
return execute();
}
+ private List<PreExecute> getPreExecHooks() throws Exception {
+ ArrayList<PreExecute> pehooks = new ArrayList<PreExecute>();
+ String pestr = conf.getVar(HiveConf.ConfVars.PREEXECHOOKS);
+ String[] peClasses = pestr.split(",");
+
+ for(String peClass: peClasses) {
+ try {
+ pehooks.add((PreExecute)Class.forName(peClass).newInstance());
+ } catch (ClassNotFoundException e) {
+ console.printError("Pre Exec Hook Class not found:" + e.getMessage());
+ throw e;
+ }
+ }
+
+ return pehooks;
+ }
+
public int execute() {
boolean noName = StringUtils.isEmpty(conf
.getVar(HiveConf.ConfVars.HADOOPJOBNAME));
@@ -220,7 +240,7 @@
conf.setVar(HiveConf.ConfVars.HIVEQUERYID, queryId);
conf.setVar(HiveConf.ConfVars.HIVEQUERYSTRING, queryStr);
- try {
+ try {
LOG.info("Starting command: " + queryStr);
if (SessionState.get() != null)
@@ -229,6 +249,14 @@
resStream = null;
BaseSemanticAnalyzer sem = plan.getPlan();
+
+ // Get all the pre execution hooks and execute them.
+ for(PreExecute peh: getPreExecHooks()) {
+ peh.run(SessionState.get(),
+ sem.getInputs(), sem.getOutputs(),
+ UserGroupInformation.getCurrentUGI());
+ }
+
int jobs = countJobs(sem.getRootTasks());
if (jobs > 0) {
console.printInfo("Total MapReduce jobs = " + jobs);