Index: contrib/src/test/results/clientpositive/serde_typedbytes.q.out
===================================================================
--- contrib/src/test/results/clientpositive/serde_typedbytes.q.out	(revision 927279)
+++ contrib/src/test/results/clientpositive/serde_typedbytes.q.out	(working copy)
@@ -88,7 +88,7 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/zshao/hadoop_hive/trunk/build/contrib/scratchdir/1970906774/10000
+                destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-06_067_8517463238607886468/10000
 
   Stage: Stage-0
     Move Operator
@@ -103,7 +103,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/zshao/hadoop_hive/trunk/build/contrib/scratchdir/1458788735/10002 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-06_067_8517463238607886468/10002 
             Reduce Output Operator
               sort order: 
               Map-reduce partition columns:
@@ -151,14 +151,18 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive/trunk/build/contrib/scratchdir/141823915/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-09_634_2163738019319878668/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive/trunk/build/contrib/scratchdir/141823915/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-09_634_2163738019319878668/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 238	val_238
 86	val_86
 311	val_311
@@ -664,3 +668,5 @@
 POSTHOOK: query: drop table dest1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
Index: contrib/src/test/results/clientpositive/fileformat_base64.q.out
===================================================================
--- contrib/src/test/results/clientpositive/fileformat_base64.q.out	(revision 927279)
+++ contrib/src/test/results/clientpositive/fileformat_base64.q.out	(working copy)
@@ -47,7 +47,7 @@
 key	int
 value	string
 
-Detailed Table Information	Table(tableName:base64_test, dbName:default, owner:njain, createTime:1253817673, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/base64_test, inputFormat:org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat, outputFormat:org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{})
+Detailed Table Information	Table(tableName:base64_test, dbName:default, owner:athusoo, createTime:1269542674, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/test/data/warehouse/base64_test, inputFormat:org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat, outputFormat:org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269542674}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
 PREHOOK: query: FROM src
 INSERT OVERWRITE TABLE base64_test
 SELECT key, value WHERE key < 10
@@ -60,14 +60,18 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@base64_test
+POSTHOOK: Lineage: base64_test.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: base64_test.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: SELECT * FROM base64_test
 PREHOOK: type: QUERY
 PREHOOK: Input: default@base64_test
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1384563275/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-44-37_960_5833473717938173526/10000
 POSTHOOK: query: SELECT * FROM base64_test
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@base64_test
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1384563275/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-44-37_960_5833473717938173526/10000
+POSTHOOK: Lineage: base64_test.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: base64_test.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 0	val_0
 4	val_4
 8	val_8
@@ -96,14 +96,22 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@base64_test
+POSTHOOK: Lineage: base64_test.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: base64_test.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: base64_test.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: base64_test.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: SELECT * FROM base64_test
 PREHOOK: type: QUERY
 PREHOOK: Input: default@base64_test
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1705395068/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-44-41_232_4132198491349512842/10000
 POSTHOOK: query: SELECT * FROM base64_test
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@base64_test
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1705395068/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-44-41_232_4132198491349512842/10000
+POSTHOOK: Lineage: base64_test.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: base64_test.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: base64_test.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: base64_test.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 0	val_0
 4	val_4
 8	val_8
@@ -119,3 +131,7 @@
 POSTHOOK: query: DROP TABLE base64_test
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@base64_test
+POSTHOOK: Lineage: base64_test.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: base64_test.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: base64_test.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: base64_test.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: contrib/src/test/results/clientpositive/serde_typedbytes2.q.out
===================================================================
--- contrib/src/test/results/clientpositive/serde_typedbytes2.q.out	(revision 927279)
+++ contrib/src/test/results/clientpositive/serde_typedbytes2.q.out	(working copy)
@@ -81,7 +81,7 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/zshao/hadoop_hive/trunk/build/contrib/scratchdir/124860625/10000
+                destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-12_258_7715465413381150324/10000
 
   Stage: Stage-0
     Move Operator
@@ -96,7 +96,7 @@
   Stage: Stage-2
     Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/zshao/hadoop_hive/trunk/build/contrib/scratchdir/1806478393/10002 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-12_258_7715465413381150324/10002 
             Reduce Output Operator
               sort order: 
               Map-reduce partition columns:
@@ -144,14 +144,18 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive/trunk/build/contrib/scratchdir/1698289796/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-15_757_4128953542754598891/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive/trunk/build/contrib/scratchdir/1698289796/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-15_757_4128953542754598891/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 238	val_238
 86	val_86
 311	val_311
@@ -657,3 +661,5 @@
 POSTHOOK: query: drop table dest1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
Index: contrib/src/test/results/clientpositive/serde_typedbytes3.q.out
===================================================================
--- contrib/src/test/results/clientpositive/serde_typedbytes3.q.out	(revision 927279)
+++ contrib/src/test/results/clientpositive/serde_typedbytes3.q.out	(working copy)
@@ -81,7 +81,7 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/zshao/hadoop_hive/trunk/build/contrib/scratchdir/1218273829/10000
+                destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-18_651_4176850179590983582/10000
 
   Stage: Stage-0
     Move Operator
@@ -96,7 +96,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/zshao/hadoop_hive/trunk/build/contrib/scratchdir/230578366/10002 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-18_651_4176850179590983582/10002 
             Reduce Output Operator
               sort order: 
               Map-reduce partition columns:
@@ -144,14 +144,18 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive/trunk/build/contrib/scratchdir/2038033327/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-22_183_3968428275430968760/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive/trunk/build/contrib/scratchdir/2038033327/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-22_183_3968428275430968760/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 238	val_238
 86	val_86
 311	val_311
@@ -657,3 +661,5 @@
 POSTHOOK: query: drop table dest1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
Index: contrib/src/test/results/clientpositive/serde_typedbytes4.q.out
===================================================================
--- contrib/src/test/results/clientpositive/serde_typedbytes4.q.out	(revision 927279)
+++ contrib/src/test/results/clientpositive/serde_typedbytes4.q.out	(working copy)
@@ -133,14 +133,18 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/1776839815/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-28_542_5637818087678321223/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/1776839815/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-28_542_5637818087678321223/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 0	val_0
 0	val_0
 0	val_0
@@ -230,3 +234,5 @@
 POSTHOOK: query: drop table dest1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
Index: contrib/src/test/results/clientpositive/serde_typedbytes5.q.out
===================================================================
--- contrib/src/test/results/clientpositive/serde_typedbytes5.q.out	(revision 927279)
+++ contrib/src/test/results/clientpositive/serde_typedbytes5.q.out	(working copy)
@@ -88,7 +88,7 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/njain/hive1/hive1/build/contrib/scratchdir/hive_2010-02-26_16-46-00_966_8770926542722067748/10000
+                destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-31_124_8690706174825261345/10000
 
   Stage: Stage-0
     Move Operator
@@ -103,7 +103,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/njain/hive1/hive1/build/contrib/scratchdir/hive_2010-02-26_16-46-00_966_8770926542722067748/10002 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-31_124_8690706174825261345/10002 
             Reduce Output Operator
               sort order: 
               Map-reduce partition columns:
@@ -151,14 +151,18 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/contrib/scratchdir/hive_2010-02-26_16-46-05_557_2000233677894920906/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-34_635_1685380532168131209/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/contrib/scratchdir/hive_2010-02-26_16-46-05_557_2000233677894920906/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/contrib/scratchdir/hive_2010-03-25_11-45-34_635_1685380532168131209/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 238	val_238
 86	val_86
 311	val_311
@@ -664,3 +668,5 @@
 POSTHOOK: query: drop table dest1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
Index: ql/src/test/results/clientnegative/fileformat_void_input.q.out
===================================================================
--- ql/src/test/results/clientnegative/fileformat_void_input.q.out	(revision 927279)
+++ ql/src/test/results/clientnegative/fileformat_void_input.q.out	(working copy)
@@ -17,4 +17,6 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 FAILED: Error in semantic analysis: line 3:20 Input Format must implement InputFormat dest1
Index: ql/src/test/results/clientnegative/smb_bucketmapjoin.q.out
===================================================================
--- ql/src/test/results/clientnegative/smb_bucketmapjoin.q.out	(revision 927279)
+++ ql/src/test/results/clientnegative/smb_bucketmapjoin.q.out	(working copy)
@@ -26,6 +26,8 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@smb_bucket4_1
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table smb_bucket4_2
 select * from src
 PREHOOK: type: QUERY
@@ -36,4 +38,8 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@smb_bucket4_2
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 FAILED: Error in semantic analysis: Map Join cannot be performed with Outer join
Index: ql/src/test/results/clientpositive/join6.q.out
===================================================================
--- ql/src/test/results/clientpositive/join6.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/join6.q.out	(working copy)
@@ -202,14 +202,22 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c4 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1206114347/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-20_029_8994611109256631197/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1206114347/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-20_029_8994611109256631197/10000
+POSTHOOK: Lineage: dest1.c4 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ]
 11	val_11	NULL	NULL
 12	val_12	NULL	NULL
 12	val_12	NULL	NULL
Index: ql/src/test/results/clientpositive/union19.q.out
===================================================================
--- ql/src/test/results/clientpositive/union19.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/union19.q.out	(working copy)
@@ -88,7 +88,7 @@
   Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1088795587/10004 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-37_868_5186779957275379888/10004 
           Union
             Select Operator
              expressions:
@@ -135,7 +135,7 @@
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest2
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1088795587/10005 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-37_868_5186779957275379888/10005 
           Union
             Select Operator
              expressions:
@@ -267,14 +267,24 @@
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
 POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest1.value UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val2 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: SELECT DEST1.* FROM DEST1 SORT BY DEST1.key, DEST1.value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/597577682/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-47_704_7977367253561036501/10000
 POSTHOOK: query: SELECT DEST1.* FROM DEST1 SORT BY DEST1.key, DEST1.value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/597577682/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-47_704_7977367253561036501/10000
+POSTHOOK: Lineage: dest1.value UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val2 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
 0	3
 10	1
 100	2
@@ -588,11 +598,16 @@
 PREHOOK: query: SELECT DEST2.* FROM DEST2 SORT BY DEST2.key, DEST2.val1, DEST2.val2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest2
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1198732652/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-50_674_5649420087295140836/10000
 POSTHOOK: query: SELECT DEST2.* FROM DEST2 SORT BY DEST2.key, DEST2.val1, DEST2.val2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest2
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1198732652/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-50_674_5649420087295140836/10000
+POSTHOOK: Lineage: dest1.value UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val2 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
 0	val_0	val_0
 0	val_0	val_0
 0	val_0	val_0
@@ -1099,8 +1114,18 @@
 POSTHOOK: query: drop table DEST1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val2 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: drop table DEST2
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table DEST2
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest1.value UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val2 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/input32.q.out
===================================================================
--- ql/src/test/results/clientpositive/input32.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/input32.q.out	(working copy)
@@ -98,22 +98,26 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcbucket
 POSTHOOK: Output: default@tst_dest32
+POSTHOOK: Lineage: tst_dest32.a UDAF null[(srcbucket)srcbucket.null, ]
 PREHOOK: query: select * from tst_dest32
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tst_dest32
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1744614026/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-22-51_716_506064530206918298/10000
 POSTHOOK: query: select * from tst_dest32
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tst_dest32
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1744614026/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-22-51_716_506064530206918298/10000
+POSTHOOK: Lineage: tst_dest32.a UDAF null[(srcbucket)srcbucket.null, ]
 1000
 PREHOOK: query: drop table tst_dest32
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table tst_dest32
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@tst_dest32
+POSTHOOK: Lineage: tst_dest32.a UDAF null[(srcbucket)srcbucket.null, ]
 PREHOOK: query: drop table dest32
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table dest32
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest32
+POSTHOOK: Lineage: tst_dest32.a UDAF null[(srcbucket)srcbucket.null, ]
Index: ql/src/test/results/clientpositive/union4.q.out
===================================================================
--- ql/src/test/results/clientpositive/union4.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/union4.q.out	(working copy)
@@ -79,7 +79,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/742643465/10002 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-31-28_658_8211676589096667243/10002 
           Union
             Select Operator
              expressions:
@@ -103,7 +103,7 @@
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: tmptable
-        file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/742643465/10004 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-31-28_658_8211676589096667243/10004 
           Union
             Select Operator
              expressions:
@@ -135,7 +135,7 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/377727473/10000
+                destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-31-28_658_8211676589096667243/10000
 
   Stage: Stage-0
     Move Operator
@@ -150,7 +150,7 @@
   Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/742643465/10003 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-31-28_658_8211676589096667243/10003 
             Reduce Output Operator
               sort order: 
              Map-reduce partition columns:
@@ -228,14 +228,18 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@tmptable
+POSTHOOK: Lineage: tmptable.value SET null[(src)s1.null, (src)s2.null, ]
+POSTHOOK: Lineage: tmptable.key SET null[]
 PREHOOK: query: select * from tmptable x sort by x.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tmptable
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/726784985/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-31-41_606_985975565675233627/10000
 POSTHOOK: query: select * from tmptable x sort by x.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tmptable
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/726784985/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-31-41_606_985975565675233627/10000
+POSTHOOK: Lineage: tmptable.value SET null[(src)s1.null, (src)s2.null, ]
+POSTHOOK: Lineage: tmptable.key SET null[]
 tst1	500
 tst2	500
 PREHOOK: query: drop table tmptable
@@ -243,3 +247,5 @@
 POSTHOOK: query: drop table tmptable
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@tmptable
+POSTHOOK: Lineage: tmptable.value SET null[(src)s1.null, (src)s2.null, ]
+POSTHOOK: Lineage: tmptable.key SET null[]
Index: ql/src/test/results/clientpositive/groupby9.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby9.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/groupby9.q.out	(working copy)
@@ -95,7 +95,7 @@
   Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/840845949/10004 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-41_331_5181016537858573715/10004 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -154,7 +154,7 @@
   Stage: Stage-4
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/840845949/10005 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-41_331_5181016537858573715/10005 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -235,14 +235,24 @@
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
 POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: SELECT DEST1.* FROM DEST1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/643876838/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-50_891_4799642064697548947/10000
 POSTHOOK: query: SELECT DEST1.* FROM DEST1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/643876838/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-50_891_4799642064697548947/10000
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 0	1
 10	1
 100	1
@@ -555,11 +565,16 @@
 PREHOOK: query: SELECT DEST2.* FROM DEST2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest2
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/2044980497/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-50_942_4404737445236920780/10000
 POSTHOOK: query: SELECT DEST2.* FROM DEST2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest2
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/2044980497/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-50_942_4404737445236920780/10000
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 0	val_0	1
 10	val_10	1
 100	val_100	1
@@ -874,8 +889,18 @@
 POSTHOOK: query: drop table DEST1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: drop table DEST2
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table DEST2
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/insert1.q.out
===================================================================
--- ql/src/test/results/clientpositive/insert1.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/insert1.q.out	(working copy)
@@ -24,13 +24,19 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@insert2
 POSTHOOK: Output: default@insert1
+POSTHOOK: Lineage: insert1.value SIMPLE null[(insert2)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert1.key SIMPLE null[(insert2)a.FieldSchema(name:key, type:int, comment:null), ]
 PREHOOK: query: drop table insert1
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table insert1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@insert1
+POSTHOOK: Lineage: insert1.value SIMPLE null[(insert2)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert1.key SIMPLE null[(insert2)a.FieldSchema(name:key, type:int, comment:null), ]
 PREHOOK: query: drop table insert2
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table insert2
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@insert2
+POSTHOOK: Lineage: insert1.value SIMPLE null[(insert2)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert1.key SIMPLE null[(insert2)a.FieldSchema(name:key, type:int, comment:null), ]
Index: ql/src/test/results/clientpositive/input41.q.out
===================================================================
--- ql/src/test/results/clientpositive/input41.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/input41.q.out	(working copy)
@@ -21,14 +21,16 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest_sp
+POSTHOOK: Lineage: dest_sp.cnt SET null[(src)src.null, (srcpart)srcpart.null, ]
 PREHOOK: query: select * from dest_sp x order by x.cnt limit 2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest_sp
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/333670274/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-24-30_613_5013339092271775768/10000
 POSTHOOK: query: select * from dest_sp x order by x.cnt limit 2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest_sp
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/333670274/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-24-30_613_5013339092271775768/10000
+POSTHOOK: Lineage: dest_sp.cnt SET null[(src)src.null, (srcpart)srcpart.null, ]
 0
 500
 PREHOOK: query: drop table dest_sp
@@ -36,3 +38,4 @@
 POSTHOOK: query: drop table dest_sp
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest_sp
+POSTHOOK: Lineage: dest_sp.cnt SET null[(src)src.null, (srcpart)srcpart.null, ]
Index: ql/src/test/results/clientpositive/groupby_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_ppr.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/groupby_ppr.q.out	(working copy)
@@ -61,10 +61,10 @@
       tag: -1
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [src]
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [src]
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [src]
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [src]
       Path -> Partition:
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             base file name: hr=11
             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -78,13 +78,13 @@
               columns.types string:string
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
               name srcpart
               partition_columns ds/hr
               serialization.ddl struct srcpart { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1266450251
+              transient_lastDdlTime 1269537578
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -95,17 +95,17 @@
               columns.types string:string
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
               name srcpart
               partition_columns ds/hr
               serialization.ddl struct srcpart { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1266450251
+              transient_lastDdlTime 1269537578
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             name: srcpart
             name: srcpart
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             base file name: hr=12
            input format: org.apache.hadoop.mapred.TextInputFormat
@@ -119,13 +119,13 @@
               columns.types string:string
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
               name srcpart
               partition_columns ds/hr
               serialization.ddl struct srcpart { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1266450251
+              transient_lastDdlTime 1269537578
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -136,13 +136,13 @@
               columns.types string:string
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
               name srcpart
               partition_columns ds/hr
               serialization.ddl struct srcpart { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1266450251
+              transient_lastDdlTime 1269537578
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             name: srcpart
             name: srcpart
@@ -178,7 +178,7 @@
           File Output Operator
             compressed: false
            GlobalTableId: 1
-            directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-44-14_330_6930344147062564978/10000
+            directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-19-40_035_3573540121238266283/10000
             NumFilesPerFileSink: 1
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -189,12 +189,12 @@
                   columns.types string:int:string
                   file.inputformat org.apache.hadoop.mapred.TextInputFormat
                   file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+                  location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
                   name dest1
                   serialization.ddl struct dest1 { string key, i32 c1, string c2}
                   serialization.format 1
                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  transient_lastDdlTime 1266450254
+                  transient_lastDdlTime 1269537580
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
             TotalFiles: 1
@@ -204,7 +204,7 @@
     Move Operator
       tables:
           replace: true
-          source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-44-14_330_6930344147062564978/10000
+          source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-19-40_035_3573540121238266283/10000
          table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -214,15 +214,15 @@
                 columns.types string:int:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+                location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
                 name dest1
                 serialization.ddl struct dest1 { string key, i32 c1, string c2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1266450254
+                transient_lastDdlTime 1269537580
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dest1
-          tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-44-14_330_6930344147062564978/10001
+          tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-19-40_035_3573540121238266283/10001
 
 
 PREHOOK: query: FROM srcpart src
@@ -243,14 +243,20 @@
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c2 UDAF null[(srcpart)src.FieldSchema(name:ds, type:string, comment:null), (srcpart)src.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.c1 UDAF null[(srcpart)src.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)src.FieldSchema(name:ds, type:string, comment:null), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-44-19_694_6115607695895856271/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-19-44_780_4246427013825953256/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-44-19_694_6115607695895856271/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-19-44_780_4246427013825953256/10000
+POSTHOOK: Lineage: dest1.c2 UDAF null[(srcpart)src.FieldSchema(name:ds, type:string, comment:null), (srcpart)src.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.c1 UDAF null[(srcpart)src.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)src.FieldSchema(name:ds, type:string, comment:null), ]
 0	1	00.0
 1	71	132828.0
 2	69	251142.0
Index: ql/src/test/results/clientpositive/join29.q.out
===================================================================
--- ql/src/test/results/clientpositive/join29.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/join29.q.out	(working copy)
@@ -91,7 +91,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/43069541/10002 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-45_329_5355407501155271175/10002 
           Common Join Operator
             condition map:
                  Inner Join 0 to 1
@@ -142,11 +142,11 @@
       Local Work:
         Map Reduce Local Work
           Alias -> Map Local Tables:
-            file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/43069541/10004 
+            file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-45_329_5355407501155271175/10004 
              Fetch Operator
                limit: -1
          Alias -> Map Local Operator Tree:
-            file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/43069541/10004 
+            file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-45_329_5355407501155271175/10004 
              Common Join Operator
                condition map:
                     Inner Join 0 to 1
@@ -202,7 +202,7 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/967863622/10000
+                destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-45_329_5355407501155271175/10000
 
   Stage: Stage-0
     Move Operator
@@ -217,7 +217,7 @@
   Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/43069541/10003 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-45_329_5355407501155271175/10003 
             Reduce Output Operator
               sort order: 
              Map-reduce partition columns:
@@ -315,14 +315,20 @@
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@src1
 POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.cnt2 UDAF null[(src)y.null, ]
+POSTHOOK: Lineage: dest_j1.cnt1 UDAF null[(src1)x.null, ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: select * from dest_j1 x order by x.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/280501142/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-55_341_8096162527616212555/10000
 POSTHOOK: query: select * from dest_j1 x order by x.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/280501142/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-55_341_8096162527616212555/10000
+POSTHOOK: Lineage: dest_j1.cnt2 UDAF null[(src)y.null, ]
+POSTHOOK: Lineage: dest_j1.cnt1 UDAF null[(src1)x.null, ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
 128	1	3
 146	1	2
 150	1	1
@@ -343,3 +349,6 @@
 POSTHOOK: query: drop TABLE dest_j1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.cnt2 UDAF null[(src)y.null, ]
+POSTHOOK: Lineage: dest_j1.cnt1 UDAF null[(src1)x.null, ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/notable_alias1.q.out
===================================================================
--- ql/src/test/results/clientpositive/notable_alias1.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/notable_alias1.q.out	(working copy)
@@ -117,14 +117,20 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.null, ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.dummy SIMPLE null[]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/573546507/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-42-46_803_5209369323331472727/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/573546507/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-42-46_803_5209369323331472727/10000
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.null, ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.dummy SIMPLE null[]
 1234	0	3.0
 1234	10	1.0
 1234	11	1.0
Index: ql/src/test/results/clientpositive/join1.q.out
===================================================================
--- ql/src/test/results/clientpositive/join1.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/join1.q.out	(working copy)
@@ -105,14 +105,18 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: SELECT dest_j1.* FROM dest_j1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_3/build/ql/scratchdir/hive_2010-02-12_22-17-40_804_3249986716900995170/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-29-00_072_4287286373479949488/10000
 POSTHOOK: query: SELECT dest_j1.* FROM dest_j1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_3/build/ql/scratchdir/hive_2010-02-12_22-17-40_804_3249986716900995170/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-29-00_072_4287286373479949488/10000
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ]
 0	val_0
 0	val_0
 0	val_0
Index: ql/src/test/results/clientpositive/input6.q.out
===================================================================
--- ql/src/test/results/clientpositive/input6.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/input6.q.out	(working copy)
@@ -59,7 +59,7 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1776707090/10000
+                destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-08_196_2563175921615398327/10000
 
   Stage: Stage-0
     Move Operator
@@ -74,7 +74,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/77560606/10002 
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-08_196_2563175921615398327/10002 
             Reduce Output Operator
               sort order: 
              Map-reduce partition columns:
@@ -108,11 +108,15 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/2091360854/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-11_535_2259877471640123659/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/2091360854/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-11_535_2259877471640123659/10000
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/loadpart1.q.out
===================================================================
--- ql/src/test/results/clientpositive/loadpart1.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/loadpart1.q.out	(working copy)
@@ -29,14 +29,16 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hive_test_src
 POSTHOOK: Output: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
+POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_Part'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1322974919/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-38-03_540_6835803304251773686/10000
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_Part'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1322974919/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-38-03_540_6835803304251773686/10000
+POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 1	test_part	test_Part
 2	test_part	test_Part
 3	test_part	test_Part
@@ -51,20 +53,26 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hive_test_src
 POSTHOOK: Output: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
+POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
+POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
 PREHOOK: type: QUERY
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/125968681/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-38-06_554_9078752117043073171/10000
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
 POSTHOOK: type: QUERY
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/125968681/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-38-06_554_9078752117043073171/10000
+POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
+POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: select * from hive_test_dst where pcol1='test_part'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1540188393/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-38-06_600_8139361045165654803/10000
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_part'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1540188393/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-38-06_600_8139361045165654803/10000
+POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
+POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 1	test_part	test_Part
 2	test_part	test_Part
 3	test_part	test_Part
@@ -73,23 +81,31 @@
 6	test_part	test_Part
 PREHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
 PREHOOK: type: QUERY
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/950500880/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-38-06_669_6324718497433995838/10000
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
 POSTHOOK: type: QUERY
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/950500880/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-38-06_669_6324718497433995838/10000
+POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
+POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: select * from hive_test_dst where pcol1='test_Part'
 PREHOOK: type: QUERY
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1398711311/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-38-06_714_4624886367999695564/10000
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_Part'
 POSTHOOK: type: QUERY
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1398711311/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-38-06_714_4624886367999695564/10000
+POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
+POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: drop table hive_test_src
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table hive_test_src
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@hive_test_src
+POSTHOOK: Lineage: hive_test_dst
PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ] +POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ] PREHOOK: query: drop table hive_test_dst PREHOOK: type: DROPTABLE POSTHOOK: query: drop table hive_test_dst POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@hive_test_dst +POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ] +POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE null[(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/udf_testlength2.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_testlength2.q.out (revision 927279) +++ ql/src/test/results/clientpositive/udf_testlength2.q.out (working copy) @@ -31,14 +31,16 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.len SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1473790346/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-23-06_453_2488114299399748912/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1473790346/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-23-06_453_2488114299399748912/10000 +POSTHOOK: Lineage: dest1.len SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 7 6 7 @@ -543,3 +545,4 @@ PREHOOK: type: DROPFUNCTION POSTHOOK: query: DROP TEMPORARY FUNCTION testlength2 POSTHOOK: type: DROPFUNCTION +POSTHOOK: Lineage: dest1.len SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/udf4.q.out =================================================================== --- ql/src/test/results/clientpositive/udf4.q.out (revision 927279) +++ ql/src/test/results/clientpositive/udf4.q.out (working copy) @@ -11,12 +11,14 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] PREHOOK: query: EXPLAIN SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), sqrt(1.0), sqrt(-1.0), sqrt(0.0), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1 PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), sqrt(1.0), sqrt(-1.0), sqrt(0.0), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1 POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION round 1.0)) (TOK_SELEXPR (TOK_FUNCTION round 1.5)) (TOK_SELEXPR (TOK_FUNCTION round (- 1.5))) 
(TOK_SELEXPR (TOK_FUNCTION floor 1.0)) (TOK_SELEXPR (TOK_FUNCTION floor 1.5)) (TOK_SELEXPR (TOK_FUNCTION floor (- 1.5))) (TOK_SELEXPR (TOK_FUNCTION sqrt 1.0)) (TOK_SELEXPR (TOK_FUNCTION sqrt (- 1.0))) (TOK_SELEXPR (TOK_FUNCTION sqrt 0.0)) (TOK_SELEXPR (TOK_FUNCTION ceil 1.0)) (TOK_SELEXPR (TOK_FUNCTION ceil 1.5)) (TOK_SELEXPR (TOK_FUNCTION ceil (- 1.5))) (TOK_SELEXPR (TOK_FUNCTION ceiling 1.0)) (TOK_SELEXPR (TOK_FUNCTION rand 3)) (TOK_SELEXPR (+ 3)) (TOK_SELEXPR (- 3)) (TOK_SELEXPR (+ 1 (+ 2))) (TOK_SELEXPR (+ 1 (- 2))) (TOK_SELEXPR (~ 1))))) @@ -87,9 +89,10 @@ PREHOOK: query: SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), sqrt(1.0), sqrt(-1.0), sqrt(0.0), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/532969237/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-11_666_8421450805476117371/10000 POSTHOOK: query: SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), sqrt(1.0), sqrt(-1.0), sqrt(0.0), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/532969237/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-11_666_8421450805476117371/10000 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] 1 2 -2 1 1 -2 1.0 NULL 0.0 1 2 -1 1 0.731057369148862 3 -3 3 -1 -2 Index: ql/src/test/results/clientpositive/join38.q.out =================================================================== --- ql/src/test/results/clientpositive/join38.q.out (revision 927279) +++ ql/src/test/results/clientpositive/join38.q.out (working copy) @@ -15,14 +15,38 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@tmp +POSTHOOK: Lineage: tmp.col11 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col10 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col9 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col8 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col7 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col6 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col5 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col4 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col2 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col0 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: select * from tmp PREHOOK: type: QUERY PREHOOK: Input: default@tmp -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/88537684/10000 +PREHOOK: Output: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-58_094_3916174625732514883/10000 POSTHOOK: query: select * from tmp POSTHOOK: type: QUERY POSTHOOK: Input: default@tmp -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/88537684/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-58_094_3916174625732514883/10000 +POSTHOOK: Lineage: tmp.col11 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col10 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col9 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col8 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col7 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col6 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col5 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col4 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col2 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col0 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 100 101 102.0 103.0 104.0 105 106.0 107.0 108.0 109.0 110.0 111 100 101 102.0 103.0 104.0 105 106.0 107.0 108.0 109.0 110.0 111 PREHOOK: query: explain @@ -37,6 +61,18 @@ where b.col11 = 111 group by a.value, b.col5 POSTHOOK: type: QUERY +POSTHOOK: Lineage: tmp.col11 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col10 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col9 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col8 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col7 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col6 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col5 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col4 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col2 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col0 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src a) (TOK_TABREF tmp b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) col11)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a))) (TOK_SELEXPR (. 
(TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) col5)) (TOK_SELEXPR (TOK_FUNCTION count 1) count)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL b) col11) 111)) (TOK_GROUPBY (. (TOK_TABLE_OR_COL a) value) (. (TOK_TABLE_OR_COL b) col5)))) @@ -106,7 +142,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1516703935/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-58_151_3256898399766088154/10002 Select Operator expressions: expr: _col1 @@ -194,7 +230,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@tmp PREHOOK: Input: default@src -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/666881903/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-58_594_2964755163165458815/10000 POSTHOOK: query: FROM src a JOIN tmp b ON (a.key = b.col11) SELECT /*+ MAPJOIN(a) */ a.value, b.col5, count(1) as count where b.col11 = 111 @@ -202,10 +238,34 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tmp POSTHOOK: Input: default@src -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/666881903/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-58_594_2964755163165458815/10000 +POSTHOOK: Lineage: tmp.col11 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col10 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col9 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col8 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col7 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col6 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col5 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col4 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col2 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col0 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] val_111 105 2 PREHOOK: query: drop table tmp PREHOOK: type: DROPTABLE POSTHOOK: query: drop table tmp POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@tmp +POSTHOOK: Lineage: tmp.col11 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col10 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col9 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col8 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col7 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col6 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col5 SIMPLE null[(src)src.FieldSchema(name:key, type:string, 
comment:default), ] +POSTHOOK: Lineage: tmp.col4 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col2 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp.col0 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/groupby4.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby4.q.out (revision 927279) +++ ql/src/test/results/clientpositive/groupby4.q.out (working copy) @@ -58,7 +58,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/717106764/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-46_431_8848177240148923338/10002 Reduce Output Operator key expressions: expr: _col0 @@ -111,14 +111,16 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/939209945/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-52_872_4158633501416394435/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/939209945/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-52_872_4158633501416394435/10000 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 1 2 Index: ql/src/test/results/clientpositive/bucketmapjoin5.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin5.q.out (revision 927279) +++ ql/src/test/results/clientpositive/bucketmapjoin5.q.out (working copy) @@ -184,7 +184,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-37_967_2403296250341215969/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-34_656_8089330545359536908/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -195,12 +195,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 
1268349277 + transient_lastDdlTime 1269536794 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -248,7 +248,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-37_967_2403296250341215969/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-34_656_8089330545359536908/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -259,12 +259,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349277 + transient_lastDdlTime 1269536794 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -273,13 +273,13 @@ Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt], srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket20.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket21.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket22.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket23.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], 
file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Needs Tagging: false Path -> Alias: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 [b] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 [b] Path -> Partition: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -293,13 +293,13 @@ 
columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349275 + transient_lastDdlTime 1269536792 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -311,17 +311,17 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349275 + transient_lastDdlTime 1269536792 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part name: srcbucket_mapjoin_part - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 Partition base file name: ds=2008-04-09 input format: org.apache.hadoop.mapred.TextInputFormat @@ -335,13 +335,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349275 + transient_lastDdlTime 1269536792 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -353,13 +353,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349275 + transient_lastDdlTime 1269536792 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
srcbucket_mapjoin_part name: srcbucket_mapjoin_part @@ -371,14 +371,14 @@ Move Operator files: hdfs directory: true - source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-37_967_2403296250341215969/10002 - destination: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-37_967_2403296250341215969/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-34_656_8089330545359536908/10002 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-34_656_8089330545359536908/10000 Stage: Stage-0 Move Operator tables: replace: true - source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-37_967_2403296250341215969/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-34_656_8089330545359536908/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -388,20 +388,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349277 + transient_lastDdlTime 1269536794 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-37_967_2403296250341215969/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-34_656_8089330545359536908/10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-37_967_2403296250341215969/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-34_656_8089330545359536908/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -417,9 +417,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-37_967_2403296250341215969/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-37_967_2403296250341215969/10002] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-34_656_8089330545359536908/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-34_656_8089330545359536908/10002] Path -> Partition: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-37_967_2403296250341215969/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-34_656_8089330545359536908/10002 Partition base file name: 
10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -430,12 +430,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349277 + transient_lastDdlTime 1269536794 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -446,12 +446,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349277 + transient_lastDdlTime 1269536794 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -460,7 +460,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-37_967_2403296250341215969/10000 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-34_656_8089330545359536908/10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -471,12 +471,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349277 + transient_lastDdlTime 1269536794 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -501,14 +501,20 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-09 POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE 
null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-57_195_783118113166525087/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-44_364_4547597179414852088/10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-57_195_783118113166525087/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-44_364_4547597179414852088/10000 +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] 928 PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_1 select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result @@ -520,6 +526,12 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result POSTHOOK: Output: default@bucketmapjoin_hash_result_1 +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b @@ -538,14 +550,32 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-09 POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, 
comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-30_661_8767326236900506639/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-59_788_3318686816964207623/10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-30_661_8767326236900506639/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-59_788_3318686816964207623/10000 +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] 928 PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_2 select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result @@ -557,20 +587,44 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result POSTHOOK: Output: default@bucketmapjoin_hash_result_2 +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: 
bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] PREHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_hash_result_2 PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-45_205_4618777774183767354/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-06_003_8922602165660854122/10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_hash_result_2 POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-45_205_4618777774183767354/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-06_003_8922602165660854122/10000 +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, 
type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
0 0 0
PREHOOK: query: explain extended insert overwrite table bucketmapjoin_tmp_result
@@ -584,6 +638,18 @@
from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b
on a.key=b.key
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF srcbucket_mapjoin a) (TOK_TABREF srcbucket_mapjoin_part_2 b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB bucketmapjoin_tmp_result)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value)))))
@@ -634,7 +700,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-53_656_392872046099932790/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-09_262_8525335869707579235/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -645,12 +711,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349277
+ transient_lastDdlTime 1269536794
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -698,7 +764,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-53_656_392872046099932790/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-09_262_8525335869707579235/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -709,12 +775,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349277
+ transient_lastDdlTime 1269536794
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -723,13 +789,13 @@
Alias Bucket Base File Name Mapping:
a {srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket22.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket23.txt=[srcbucket21.txt]}
Alias Bucket File Name Mapping:
- a {file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]}
+ a {file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]}
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b]
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 [b]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 [b]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08
Partition
base file name: ds=2008-04-08
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -743,13 +809,13 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
name srcbucket_mapjoin_part_2
partition_columns ds
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349277
+ transient_lastDdlTime 1269536793
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -761,17 +827,17 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
name srcbucket_mapjoin_part_2
partition_columns ds
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349277
+ transient_lastDdlTime 1269536793
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket_mapjoin_part_2
name: srcbucket_mapjoin_part_2
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09
Partition
base file name: ds=2008-04-09
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -785,13 +851,13 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
name srcbucket_mapjoin_part_2
partition_columns ds
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349277
+ transient_lastDdlTime 1269536793
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -803,13 +869,13 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
name srcbucket_mapjoin_part_2
partition_columns ds
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349277
+ transient_lastDdlTime 1269536793
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket_mapjoin_part_2
name: srcbucket_mapjoin_part_2
@@ -821,14 +887,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-53_656_392872046099932790/10002
- destination: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-53_656_392872046099932790/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-09_262_8525335869707579235/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-09_262_8525335869707579235/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-53_656_392872046099932790/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-09_262_8525335869707579235/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -838,20 +904,20 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349277
+ transient_lastDdlTime 1269536794
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
- tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-53_656_392872046099932790/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-09_262_8525335869707579235/10001
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-53_656_392872046099932790/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-09_262_8525335869707579235/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -867,9 +933,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-53_656_392872046099932790/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-53_656_392872046099932790/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-09_262_8525335869707579235/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-09_262_8525335869707579235/10002]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-53_656_392872046099932790/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-09_262_8525335869707579235/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -880,12 +946,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349277
+ transient_lastDdlTime 1269536794
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -896,12 +962,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349277
+ transient_lastDdlTime 1269536794
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
name: bucketmapjoin_tmp_result
@@ -910,7 +976,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-15-53_656_392872046099932790/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-09_262_8525335869707579235/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -921,12 +987,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349277
+ transient_lastDdlTime 1269536794
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -951,14 +1017,44 @@
POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-09
POSTHOOK: Input: default@srcbucket_mapjoin
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-16-08_365_5770438645965845579/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-16_913_2625396345348246399/10000
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-16-08_365_5770438645965845579/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-16_913_2625396345348246399/10000
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
0
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_1
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
@@ -970,6 +1066,24 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
POSTHOOK: Output: default@bucketmapjoin_hash_result_1
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result
select /*+mapjoin(a)*/ a.key, a.value, b.value
from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b
@@ -988,14 +1102,56 @@
POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-09
POSTHOOK: Input: default@srcbucket_mapjoin
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-16-45_334_7493762164796370268/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-30_201_7688988315963016670/10000
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-16-45_334_7493762164796370268/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-30_201_7688988315963016670/10000
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
0
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_2
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
@@ -1007,48 +1163,240 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
POSTHOOK: Output: default@bucketmapjoin_hash_result_2
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2
from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b
on a.key = b.key
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_hash_result_2
PREHOOK: Input: default@bucketmapjoin_hash_result_1
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-16-58_278_4065823787826103572/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-36_318_4720683375994119805/10000
POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2
from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b
on a.key = b.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_hash_result_2
POSTHOOK: Input: default@bucketmapjoin_hash_result_1
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-16-58_278_4065823787826103572/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-36_318_4720683375994119805/10000
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
NULL NULL NULL
PREHOOK: query: drop table bucketmapjoin_hash_result_2
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table bucketmapjoin_hash_result_2
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucketmapjoin_hash_result_2
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: drop table bucketmapjoin_hash_result_1
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table bucketmapjoin_hash_result_1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucketmapjoin_hash_result_1
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: drop table bucketmapjoin_tmp_result
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table bucketmapjoin_tmp_result
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: drop table srcbucket_mapjoin
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table srcbucket_mapjoin
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@srcbucket_mapjoin
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: drop table srcbucket_mapjoin_part
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table srcbucket_mapjoin_part
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] PREHOOK: query: drop table srcbucket_mapjoin_part_2 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table srcbucket_mapjoin_part_2 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@srcbucket_mapjoin_part_2 +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE 
null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/join24.q.out =================================================================== --- ql/src/test/results/clientpositive/join24.q.out (revision 927279) +++ ql/src/test/results/clientpositive/join24.q.out (working copy) @@ -13,17 +13,23 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@tst1 +POSTHOOK: Lineage: tst1.cnt UDAF null[(src)a.null, ] +POSTHOOK: Lineage: tst1.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT sum(a.cnt) FROM tst1 a JOIN tst1 b ON a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@tst1 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1057588423/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-30-59_359_6163656824130501134/10000 POSTHOOK: query: SELECT sum(a.cnt) FROM tst1 a JOIN tst1 b ON a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@tst1 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1057588423/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-30-59_359_6163656824130501134/10000 +POSTHOOK: Lineage: tst1.cnt UDAF null[(src)a.null, ] +POSTHOOK: Lineage: tst1.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ] 500 PREHOOK: query: drop table tst1 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table tst1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@tst1 +POSTHOOK: Lineage: tst1.cnt UDAF null[(src)a.null, ] +POSTHOOK: Lineage: tst1.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/groupby7_map.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby7_map.q.out (revision 927279) +++ ql/src/test/results/clientpositive/groupby7_map.q.out (working copy) @@ -130,7 +130,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/325462154/10004 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-20_534_42633561362468536/10004 Reduce Output Operator key expressions: expr: _col0 @@ -201,14 +201,22 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 POSTHOOK: Output: default@dest2 +POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.value UDAF 
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT DEST1.* FROM DEST1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/2039006865/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-27_208_8784040858563012736/10000
POSTHOOK: query: SELECT DEST1.* FROM DEST1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/2039006865/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-27_208_8784040858563012736/10000
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 0.0
10 10.0
100 200.0
@@ -521,11 +529,15 @@
PREHOOK: query: SELECT DEST2.* FROM DEST2
PREHOOK: type: QUERY
PREHOOK: Input: default@dest2
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1058410180/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-27_258_7763995930380417681/10000
POSTHOOK: query: SELECT DEST2.* FROM DEST2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest2
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1058410180/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-27_258_7763995930380417681/10000
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/join33.q.out
===================================================================
--- ql/src/test/results/clientpositive/join33.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/join33.q.out (working copy)
@@ -45,7 +45,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-57_593_3207106827872211508/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-48_737_1044006217988160488/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -81,7 +81,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-57_593_3207106827872211508/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-48_737_1044006217988160488/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -94,9 +94,9 @@
MultiFileSpray: false
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src [y]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src [y]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
Partition
base file name: src
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -107,12 +107,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
name src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451316
+ transient_lastDdlTime 1269538368
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -123,12 +123,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
name src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451316
+ transient_lastDdlTime 1269538368
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: src
name: src
@@ -136,7 +136,7 @@
Stage: Stage-1
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-57_593_3207106827872211508/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-48_737_1044006217988160488/10002
Select Operator
expressions:
expr: _col0
@@ -192,10 +192,10 @@
type: string
Needs Tagging: true
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-57_593_3207106827872211508/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-57_593_3207106827872211508/10002]
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-48_737_1044006217988160488/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-48_737_1044006217988160488/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-57_593_3207106827872211508/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-48_737_1044006217988160488/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -211,7 +211,7 @@
columns _col0,_col1,_col3
columns.types string,string,string
escape.delim \
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -225,13 +225,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451315
+ transient_lastDdlTime 1269538366
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -242,13 +242,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451315
+ transient_lastDdlTime 1269538366
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
name: srcpart
@@ -273,7 +273,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-57_593_3207106827872211508/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-48_737_1044006217988160488/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -284,12 +284,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451317
+ transient_lastDdlTime 1269538368
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
@@ -299,7 +299,7 @@
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-57_593_3207106827872211508/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-48_737_1044006217988160488/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -309,15 +309,15 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451317
+ transient_lastDdlTime 1269538368
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-57_593_3207106827872211508/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-48_737_1044006217988160488/10001
PREHOOK: query: INSERT OVERWRITE TABLE dest_j1
@@ -338,14 +338,20 @@
POSTHOOK: Input: default@src
POSTHOOK: Input: default@src1
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.val2 SET null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: select * from dest_j1 x order by x.key
PREHOOK: type: QUERY
PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-08_219_6153631152480059916/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-55_729_5766030242518747631/10000
POSTHOOK: query: select * from dest_j1 x order by x.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-08_219_6153631152480059916/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-55_729_5766030242518747631/10000
+POSTHOOK: Lineage: dest_j1.val2 SET null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
146 val_146 val_146
146 val_146 val_146
146 val_146 val_146
@@ -436,3 +442,6 @@
POSTHOOK: query: drop table dest_j1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.val2 SET null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/input_part2.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part2.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/input_part2.q.out (working copy)
@@ -69,7 +69,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10004
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10004
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -80,12 +80,12 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450795
+ transient_lastDdlTime 1269537989
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
TotalFiles: 1
@@ -120,7 +120,7 @@
File Output Operator
compressed: false
GlobalTableId: 2
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10005
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10005
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -131,22 +131,22 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest2
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest2
name dest2
serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450796
+ transient_lastDdlTime 1269537989
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest2
TotalFiles: 1
MultiFileSpray: false
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [srcpart]
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [srcpart]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [srcpart]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [srcpart]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -160,13 +160,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450793
+ transient_lastDdlTime 1269537987
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -177,17 +177,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450793
+ transient_lastDdlTime 1269537987
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
name: srcpart
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -201,13 +201,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450793
+ transient_lastDdlTime 1269537987
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -218,13 +218,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450793
+ transient_lastDdlTime 1269537987
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
name: srcpart
@@ -236,14 +236,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10004
- destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10004
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -253,20 +253,20 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450795
+ transient_lastDdlTime 1269537989
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10001
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10004
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10004
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -284,9 +284,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10004 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10004]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10004 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10004]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10004
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10004
Partition
base file name: 10004
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -297,12 +297,12 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450795
+ transient_lastDdlTime 1269537989
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -313,12 +313,12 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450795
+ transient_lastDdlTime 1269537989
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
name: dest1
@@ -327,7 +327,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -338,12 +338,12 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450795
+ transient_lastDdlTime 1269537989
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
TotalFiles: 1
@@ -356,14 +356,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10005
- destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10002
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10005
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10002
Stage: Stage-1
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10002
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10002
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -373,20 +373,20 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest2
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest2
name dest2
serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450796
+ transient_lastDdlTime 1269537989
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest2
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10003
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10003
Stage: Stage-6
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10005
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10005
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -404,9 +404,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10005 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10005]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10005 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10005]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10005
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10005
Partition
base file name: 10005
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -417,12 +417,12 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest2
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest2
name dest2
serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450796
+ transient_lastDdlTime 1269537989
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -433,12 +433,12 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest2
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest2
name dest2
serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450796
+ transient_lastDdlTime 1269537989
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest2
name: dest2
@@ -447,7 +447,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-16_048_1871077275101052105/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-29_103_8866402423753022479/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -458,12 +458,12 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest2
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest2
name dest2
serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450796
+ transient_lastDdlTime 1269537989
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest2
TotalFiles: 1
@@ -486,14 +486,30 @@
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
POSTHOOK: Output: default@dest1
POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest1.ds SIMPLE null[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.hr SIMPLE null[(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE null[(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.ds SIMPLE null[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.hr SIMPLE null[(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value SIMPLE null[(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
PREHOOK: query: SELECT dest1.* FROM dest1 sort by key,value,ds,hr
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-29_652_3418508444889016665/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-38_972_4366346008159989382/10000
POSTHOOK: query: SELECT dest1.* FROM dest1 sort by key,value,ds,hr
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-29_652_3418508444889016665/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-38_972_4366346008159989382/10000
+POSTHOOK: Lineage: dest1.ds SIMPLE null[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.hr SIMPLE null[(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE null[(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.ds SIMPLE null[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.hr SIMPLE null[(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value SIMPLE null[(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
0 val_0 12 2008-04-08
0 val_0 12 2008-04-08
0 val_0 12 2008-04-08
@@ -581,11 +597,19 @@
PREHOOK: query: SELECT dest2.* FROM dest2 sort by key,value,ds,hr
PREHOOK: type: QUERY
PREHOOK: Input: default@dest2
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-33_205_4473178795470477991/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-42_017_718548678197776696/10000
POSTHOOK: query: SELECT dest2.* FROM dest2 sort by key,value,ds,hr
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest2
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-33_205_4473178795470477991/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-42_017_718548678197776696/10000
+POSTHOOK: Lineage: dest1.ds SIMPLE null[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.hr SIMPLE null[(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE null[(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.ds SIMPLE null[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.hr SIMPLE null[(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value SIMPLE null[(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
0 val_0 12 2008-04-09
0 val_0 12 2008-04-09
0 val_0 12 2008-04-09
@@ -675,3 +699,11 @@
POSTHOOK: query: drop table dest2
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest1.ds SIMPLE null[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.hr SIMPLE null[(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE null[(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.ds SIMPLE null[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.hr SIMPLE null[(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value SIMPLE null[(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
Index: ql/src/test/results/clientpositive/input38.q.out
===================================================================
--- ql/src/test/results/clientpositive/input38.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/input38.q.out (working copy)
@@ -79,7 +79,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/2100713798/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-26_985_1518875800930620674/10000
Stage: Stage-0
Move Operator
@@ -94,7 +94,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/331742220/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-26_985_1518875800930620674/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -136,14 +136,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1159389345/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-30_430_7159503707453369136/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1159389345/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-30_430_7159503707453369136/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
238 val_238 3 7
86 val_86 3 7
311 val_311 3 7
@@ -649,3 +653,5 @@
POSTHOOK: query: drop table dest1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/mapreduce5.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapreduce5.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/mapreduce5.q.out (working copy)
@@ -113,14 +113,22 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.one SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.ten SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2004793034/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-21_064_1705761125827650831/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2004793034/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-21_064_1705761125827650831/10000
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.one SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.ten SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
490 49 0 val_490
491 49 1 val_491
492 49 2 val_492
Index: ql/src/test/results/clientpositive/create_view.q.out
===================================================================
--- ql/src/test/results/clientpositive/create_view.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/create_view.q.out (working copy)
@@ -81,64 +81,64 @@
PREHOOK: query: SELECT * FROM src WHERE key=86
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-31_829_5522770563042118284/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-24_258_8706778397568857777/10000
POSTHOOK: query: SELECT * FROM src WHERE key=86
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-31_829_5522770563042118284/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-24_258_8706778397568857777/10000
86 val_86
PREHOOK: query: CREATE VIEW view1 AS SELECT value FROM src WHERE key=86
PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-35_916_9116506130478692569/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-27_143_4992960715585447092/10000
POSTHOOK: query: CREATE VIEW view1 AS SELECT value FROM src WHERE key=86
POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-35_916_9116506130478692569/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-27_143_4992960715585447092/10000
POSTHOOK: Output: default@view1
PREHOOK: query: CREATE VIEW view2 AS SELECT * FROM src
PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-35_993_4047675808022900939/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-27_184_5825097842368358482/10000
POSTHOOK: query: CREATE VIEW view2 AS SELECT * FROM src
POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-35_993_4047675808022900939/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-27_184_5825097842368358482/10000
POSTHOOK: Output: default@view2
PREHOOK: query: CREATE VIEW view3(valoo) TBLPROPERTIES ("fear" = "factor") AS SELECT upper(value) FROM src WHERE key=86
PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-36_043_2421454012035379960/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-27_227_8631260256662802058/10000
POSTHOOK: query: CREATE VIEW view3(valoo) TBLPROPERTIES ("fear" = "factor") AS SELECT upper(value) FROM src WHERE key=86
POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-36_043_2421454012035379960/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-27_227_8631260256662802058/10000
POSTHOOK: Output: default@view3
PREHOOK: query: SELECT * from view1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-36_116_536565027821657675/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-27_272_2841320249450011831/10000
POSTHOOK: query: SELECT * from view1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-36_116_536565027821657675/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-27_272_2841320249450011831/10000
val_86
PREHOOK: query: SELECT * from view2 where key=18
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-39_923_6973273744819999290/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-30_243_4626894380187391951/10000
POSTHOOK: query: SELECT * from view2 where key=18
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-39_923_6973273744819999290/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-30_243_4626894380187391951/10000
18 val_18
18 val_18
PREHOOK: query: SELECT * from view3
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-43_651_1407751020665384270/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-33_394_4850595534476210007/10000
POSTHOOK: query: SELECT * from view3
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-43_651_1407751020665384270/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-33_394_4850595534476210007/10000
VAL_86
PREHOOK: query: -- test EXPLAIN output for CREATE VIEW
EXPLAIN
@@ -239,7 +239,7 @@
POSTHOOK: type: DESCTABLE
value string
-Detailed Table Information Table(tableName:view1, dbName:default, owner:jsichi, createTime:1269031175, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031175}, viewOriginalText:SELECT value FROM src WHERE key=86, viewExpandedText:SELECT `src`.`value` FROM `src` WHERE `src`.`key`=86, tableType:VIRTUAL_VIEW)
+Detailed Table Information Table(tableName:view1, dbName:default, owner:athusoo, createTime:1269537027, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537027}, viewOriginalText:SELECT value FROM src WHERE key=86, viewExpandedText:SELECT `src`.`value` FROM `src` WHERE `src`.`key`=86, tableType:VIRTUAL_VIEW)
PREHOOK: query: DESCRIBE view2
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE view2
@@ -253,7 +253,7 @@
key string
value string
-Detailed Table Information Table(tableName:view2, dbName:default, owner:jsichi, createTime:1269031176, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031176}, viewOriginalText:SELECT * FROM src, viewExpandedText:SELECT `src`.`key`, `src`.`value` FROM `src`, tableType:VIRTUAL_VIEW)
+Detailed Table Information Table(tableName:view2, dbName:default, owner:athusoo, createTime:1269537027, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537027}, viewOriginalText:SELECT * FROM src, viewExpandedText:SELECT `src`.`key`, `src`.`value` FROM `src`, tableType:VIRTUAL_VIEW)
PREHOOK: query: DESCRIBE view3
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE view3
@@ -265,7 +265,7 @@
POSTHOOK: type: DESCTABLE
valoo string
-Detailed Table Information Table(tableName:view3, dbName:default, owner:jsichi, createTime:1269031176, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:valoo, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031176,fear=factor}, viewOriginalText:SELECT upper(value) FROM src WHERE key=86, viewExpandedText:SELECT `_c0` AS `valoo` FROM (SELECT upper(`src`.`value`) FROM `src` WHERE `src`.`key`=86) `view3`, tableType:VIRTUAL_VIEW)
+Detailed Table Information Table(tableName:view3, dbName:default, owner:athusoo, createTime:1269537027, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:valoo, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537027,fear=factor}, viewOriginalText:SELECT upper(value) FROM src WHERE key=86, viewExpandedText:SELECT `_c0` AS `valoo` FROM (SELECT upper(`src`.`value`) FROM `src` WHERE `src`.`key`=86) `view3`, tableType:VIRTUAL_VIEW)
PREHOOK: query: ALTER VIEW view3 SET TBLPROPERTIES ("biggest" = "loser")
PREHOOK: type: ALTERVIEW_PROPERTIES
POSTHOOK: query: ALTER VIEW view3 SET TBLPROPERTIES ("biggest" = "loser")
@@ -278,7 +278,7 @@
POSTHOOK: type: DESCTABLE
valoo string
-Detailed Table Information Table(tableName:view3, dbName:default, owner:jsichi, createTime:1269031176, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:valoo, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=jsichi,last_modified_time=1269031188,biggest=loser,transient_lastDdlTime=1269031188,fear=factor}, viewOriginalText:SELECT upper(value) FROM src WHERE key=86, viewExpandedText:SELECT `_c0` AS `valoo` FROM (SELECT upper(`src`.`value`) FROM `src` WHERE `src`.`key`=86) `view3`, tableType:VIRTUAL_VIEW)
+Detailed Table Information Table(tableName:view3, dbName:default, owner:athusoo, createTime:1269537027, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:valoo, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=athusoo,last_modified_time=1269537037,biggest=loser,transient_lastDdlTime=1269537037,fear=factor}, viewOriginalText:SELECT upper(value) FROM src WHERE key=86, viewExpandedText:SELECT `_c0` AS `valoo` FROM (SELECT upper(`src`.`value`) FROM `src` WHERE `src`.`key`=86) `view3`, tableType:VIRTUAL_VIEW)
PREHOOK: query: CREATE TABLE table1 (key int)
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE TABLE table1 (key int)
@@ -294,7 +294,7 @@
POSTHOOK: type: DESCTABLE
key int
-Detailed Table Information Table(tableName:table1, dbName:default, owner:jsichi, createTime:1269031189, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null)], location:file:/Users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031189}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:table1, dbName:default, owner:athusoo, createTime:1269537037, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537037}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: DESCRIBE EXTENDED src1
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE EXTENDED src1
@@ -302,7 +302,7 @@
key string default
value string default
-Detailed Table Information Table(tableName:src1, dbName:default, owner:null, createTime:1269031170, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:file:/Users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031170}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:src1, dbName:default, owner:null, createTime:1269537023, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537023}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: -- use DESCRIBE EXTENDED on a base table as a point of comparison for
-- view descriptions
DESCRIBE EXTENDED table1
@@ -313,7 +313,7 @@
POSTHOOK: type: DESCTABLE
key int
-Detailed Table Information Table(tableName:table1, dbName:default, owner:jsichi, createTime:1269031189, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null)], location:file:/Users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031189}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:table1, dbName:default, owner:athusoo, createTime:1269537037, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537037}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: INSERT OVERWRITE TABLE table1 SELECT key FROM src WHERE key = 86
PREHOOK: type: QUERY
PREHOOK: Input: default@src
@@ -322,35 +322,40 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@table1
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT * FROM table1
PREHOOK: type: QUERY
PREHOOK: Input: default@table1
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-53_774_7135536591732307384/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-40_567_5677063303157022037/10000
POSTHOOK: query: SELECT * FROM table1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@table1
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-53_774_7135536591732307384/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-40_567_5677063303157022037/10000
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
86
PREHOOK: query: CREATE VIEW view4 AS SELECT * FROM table1
PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-53_993_7516410062227097116/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-40_611_3046187209399425603/10000
POSTHOOK: query: CREATE VIEW view4 AS SELECT * FROM table1
POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-53_993_7516410062227097116/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-40_611_3046187209399425603/10000
POSTHOOK: Output: default@view4
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT * FROM view4
PREHOOK: type: QUERY
PREHOOK: Input: default@table1
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-54_142_2493724057451574810/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-40_651_3909125366451237234/10000
POSTHOOK: query: SELECT * FROM view4
POSTHOOK: type: QUERY
POSTHOOK: Input: default@table1
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-54_142_2493724057451574810/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-40_651_3909125366451237234/10000
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
86
PREHOOK: query: DESCRIBE view4
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE view4
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
key int
PREHOOK: query: ALTER TABLE table1 ADD COLUMNS (value STRING)
PREHOOK: type: ALTERTABLE_ADDCOLS
@@ -358,57 +363,65 @@
POSTHOOK: type: ALTERTABLE_ADDCOLS
POSTHOOK: Input: default@table1
POSTHOOK:
Output: default@table1 +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT * FROM table1 PREHOOK: type: QUERY PREHOOK: Input: default@table1 -PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-59_541_2756881245634210389/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-43_568_7414232913430037670/10000 POSTHOOK: query: SELECT * FROM table1 POSTHOOK: type: QUERY POSTHOOK: Input: default@table1 -POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-59_541_2756881245634210389/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-43_568_7414232913430037670/10000 +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 86 NULL PREHOOK: query: SELECT * FROM view4 PREHOOK: type: QUERY PREHOOK: Input: default@table1 -PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-59_598_7493857995999780615/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-43_611_4765125621071077338/10000 POSTHOOK: query: SELECT * FROM view4 POSTHOOK: type: QUERY POSTHOOK: Input: default@table1 -POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-39-59_598_7493857995999780615/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-43_611_4765125621071077338/10000 +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 86 PREHOOK: query: DESCRIBE table1 PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE table1 POSTHOOK: type: DESCTABLE +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] key int value string PREHOOK: query: DESCRIBE view4 PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE view4 POSTHOOK: type: DESCTABLE +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] key int PREHOOK: query: CREATE VIEW view5 AS SELECT v1.key as key1, v2.key as key2 FROM view4 v1 join view4 v2 PREHOOK: type: CREATEVIEW -PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-07_020_4290536384418559444/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-46_538_4813346596989317081/10000 POSTHOOK: query: CREATE VIEW view5 AS SELECT v1.key as key1, v2.key as key2 FROM view4 v1 join view4 v2 POSTHOOK: type: CREATEVIEW -POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-07_020_4290536384418559444/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-46_538_4813346596989317081/10000 POSTHOOK: Output: default@view5 +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT * FROM view5 PREHOOK: type: QUERY PREHOOK: Input: default@table1 -PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-07_409_325682480297121398/10000 +PREHOOK: Output: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-46_622_4361387693685530925/10000 POSTHOOK: query: SELECT * FROM view5 POSTHOOK: type: QUERY POSTHOOK: Input: default@table1 -POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-07_409_325682480297121398/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-46_622_4361387693685530925/10000 +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 86 86 PREHOOK: query: DESCRIBE view5 PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE view5 POSTHOOK: type: DESCTABLE +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] key1 int key2 int PREHOOK: query: -- verify that column name and comment in DDL portion @@ -416,18 +429,20 @@ CREATE VIEW view6(valoo COMMENT 'I cannot spell') AS SELECT upper(value) as blarg FROM src WHERE key=86 PREHOOK: type: CREATEVIEW -PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-14_622_2595796651475769469/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-50_014_3278344313701601472/10000 POSTHOOK: query: -- verify that column name and comment in DDL portion -- overrides column alias in SELECT CREATE VIEW view6(valoo COMMENT 'I cannot spell') AS SELECT upper(value) as blarg FROM src WHERE key=86 POSTHOOK: type: CREATEVIEW -POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-14_622_2595796651475769469/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-50_014_3278344313701601472/10000 POSTHOOK: Output: default@view6 +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: DESCRIBE view6 PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE view6 POSTHOOK: type: DESCTABLE +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] valoo string I cannot spell PREHOOK: query: -- verify that ORDER BY and LIMIT are both supported in view def CREATE VIEW view7 AS @@ -436,7 +451,7 @@ ORDER BY key, value LIMIT 10 PREHOOK: type: CREATEVIEW -PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-14_748_2736084793566193692/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-50_119_2053540616685981826/10000 POSTHOOK: query: -- verify that ORDER BY and LIMIT are both supported in view def CREATE VIEW view7 AS SELECT * FROM src @@ -444,16 +459,18 @@ ORDER BY key, value LIMIT 10 POSTHOOK: type: CREATEVIEW -POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-14_748_2736084793566193692/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-50_119_2053540616685981826/10000 POSTHOOK: Output: default@view7 +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT * FROM view7 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-14_807_1304405362451529591/10000 
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-50_167_3432507992534122198/10000 POSTHOOK: query: SELECT * FROM view7 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-14_807_1304405362451529591/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-50_167_3432507992534122198/10000 +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 82 val_82 83 val_83 83 val_83 @@ -470,14 +487,15 @@ SELECT * FROM view7 ORDER BY key DESC, value PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-19_910_2408062205164076125/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-53_466_3289447808859006338/10000 POSTHOOK: query: -- top-level ORDER BY should override the one inside the view -- (however, the inside ORDER BY should still influence the evaluation -- of the limit) SELECT * FROM view7 ORDER BY key DESC, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-19_910_2408062205164076125/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-53_466_3289447808859006338/10000 +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 90 val_90 90 val_90 87 val_87 @@ -492,12 +510,13 @@ SELECT * FROM view7 LIMIT 5 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-34_640_7311022111308342987/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-59_547_3944713474622008452/10000 POSTHOOK: query: -- top-level LIMIT should override if lower SELECT * FROM view7 LIMIT 5 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-34_640_7311022111308342987/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-59_547_3944713474622008452/10000 +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 82 val_82 83 val_83 83 val_83 @@ -507,12 +526,13 @@ SELECT * FROM view7 LIMIT 20 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-41_278_3586252452274670959/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-02_747_7813583323007912815/10000 POSTHOOK: query: -- but not if higher SELECT * FROM view7 LIMIT 20 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-41_278_3586252452274670959/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-02_747_7813583323007912815/10000 +POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 
82 val_82
83 val_83
83 val_83
@@ -531,34 +551,38 @@
CREATE TEMPORARY FUNCTION test_translate AS
'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestTranslate'
POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: CREATE VIEW view8(c) AS
SELECT test_translate('abc', 'a', 'b')
FROM table1
PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-46_628_7211245422581815274/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-05_952_6797299684714465444/10000
POSTHOOK: query: CREATE VIEW view8(c) AS
SELECT test_translate('abc', 'a', 'b')
FROM table1
POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-46_628_7211245422581815274/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-05_952_6797299684714465444/10000
POSTHOOK: Output: default@view8
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DESCRIBE EXTENDED view8
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE EXTENDED view8
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
c string
-Detailed Table Information Table(tableName:view8, dbName:default, owner:jsichi, createTime:1269031246, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:c, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031246}, viewOriginalText:SELECT test_translate('abc', 'a', 'b')
+Detailed Table Information Table(tableName:view8, dbName:default, owner:athusoo, createTime:1269537065, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:c, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537065}, viewOriginalText:SELECT test_translate('abc', 'a', 'b')
FROM table1, viewExpandedText:SELECT `_c0` AS `c` FROM (SELECT `test_translate`('abc', 'a', 'b')
FROM `table1`) `view8`, tableType:VIRTUAL_VIEW)
PREHOOK: query: SELECT * FROM view8
PREHOOK: type: QUERY
PREHOOK: Input: default@table1
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-47_090_5480733923385875308/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-06_062_1933293713695450905/10000
POSTHOOK: query: SELECT * FROM view8
POSTHOOK: type: QUERY
POSTHOOK: Input: default@table1
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-47_090_5480733923385875308/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-06_062_1933293713695450905/10000
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
bbc
PREHOOK: query: -- test usage of a UDAF within a view
CREATE TEMPORARY FUNCTION test_max AS
@@ -568,62 +592,69 @@
CREATE TEMPORARY FUNCTION test_max AS
'org.apache.hadoop.hive.ql.udf.UDAFTestMax'
POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: CREATE VIEW view9(m) AS
SELECT test_max(length(value))
FROM src
PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-53_624_5851403487989122721/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-09_020_1572540846489801791/10000
POSTHOOK: query: CREATE VIEW view9(m) AS
SELECT test_max(length(value))
FROM src
POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-53_624_5851403487989122721/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-09_020_1572540846489801791/10000
POSTHOOK: Output: default@view9
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DESCRIBE EXTENDED view9
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE EXTENDED view9
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
m int
-Detailed Table Information Table(tableName:view9, dbName:default, owner:jsichi, createTime:1269031253, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:m, type:int, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031253}, viewOriginalText:SELECT test_max(length(value))
+Detailed Table Information Table(tableName:view9, dbName:default, owner:athusoo, createTime:1269537069, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:m, type:int, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537069}, viewOriginalText:SELECT test_max(length(value))
FROM src, viewExpandedText:SELECT `_c0` AS `m` FROM (SELECT `test_max`(length(`src`.`value`))
FROM `src`) `view9`, tableType:VIRTUAL_VIEW)
PREHOOK: query: SELECT * FROM view9
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-53_999_8114360891242525813/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-09_144_2320974384478972290/10000
POSTHOOK: query: SELECT * FROM view9
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-53_999_8114360891242525813/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-09_144_2320974384478972290/10000
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
7
PREHOOK: query: -- test usage of a subselect within a view
CREATE VIEW view10 AS
SELECT slurp.* FROM (SELECT * FROM src WHERE key=86) slurp
PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-59_290_1245187752203524003/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-12_355_6267508791911802269/10000
POSTHOOK: query: -- test usage of a subselect within a view
CREATE VIEW view10 AS
SELECT slurp.* FROM (SELECT * FROM src WHERE key=86) slurp
POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-59_290_1245187752203524003/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-12_355_6267508791911802269/10000
POSTHOOK: Output: default@view10
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DESCRIBE EXTENDED view10
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE EXTENDED view10
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
key string
value string
-Detailed Table Information Table(tableName:view10, dbName:default, owner:jsichi, createTime:1269031259, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031259}, viewOriginalText:SELECT slurp.* FROM (SELECT * FROM src WHERE key=86) slurp, viewExpandedText:SELECT `slurp`.`key`, `slurp`.`value` FROM (SELECT `src`.`key`, `src`.`value` FROM `src` WHERE `src`.`key`=86) `slurp`, tableType:VIRTUAL_VIEW)
+Detailed Table Information Table(tableName:view10, dbName:default, owner:athusoo, createTime:1269537072, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537072}, viewOriginalText:SELECT slurp.* FROM (SELECT * FROM src WHERE key=86) slurp, viewExpandedText:SELECT `slurp`.`key`, `slurp`.`value` FROM (SELECT `src`.`key`, `src`.`value` FROM `src` WHERE `src`.`key`=86) `slurp`, tableType:VIRTUAL_VIEW)
PREHOOK: query: SELECT * FROM view10
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-59_440_5022267836249888217/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-12_470_2815990812671795305/10000
POSTHOOK: query: SELECT * FROM view10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-40-59_440_5022267836249888217/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-12_470_2815990812671795305/10000
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
86 val_86
PREHOOK: query: -- test usage of a UDTF within a view
CREATE TEMPORARY FUNCTION test_explode AS
@@ -633,34 +664,38 @@
CREATE TEMPORARY FUNCTION test_explode AS
'org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode'
POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: CREATE VIEW view11 AS
SELECT test_explode(array(1,2,3)) AS (boom)
FROM table1
PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-04_999_5099365254709969398/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-15_499_1712843735507247650/10000
POSTHOOK: query: CREATE VIEW view11 AS
SELECT test_explode(array(1,2,3)) AS (boom)
FROM table1
POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-04_999_5099365254709969398/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-15_499_1712843735507247650/10000
POSTHOOK: Output: default@view11
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DESCRIBE EXTENDED view11
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE EXTENDED view11
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
boom int
-Detailed Table Information Table(tableName:view11, dbName:default, owner:jsichi, createTime:1269031265, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:boom, type:int, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031265}, viewOriginalText:SELECT test_explode(array(1,2,3)) AS (boom)
+Detailed Table Information Table(tableName:view11, dbName:default, owner:athusoo, createTime:1269537075, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:boom, type:int, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537075}, viewOriginalText:SELECT test_explode(array(1,2,3)) AS (boom)
FROM table1, viewExpandedText:SELECT `test_explode`(array(1,2,3)) AS (`boom`)
FROM `table1`, tableType:VIRTUAL_VIEW)
PREHOOK: query: SELECT * FROM view11
PREHOOK: type: QUERY
PREHOOK: Input: default@table1
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-05_325_5076134622461852994/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-15_608_2737310037483137245/10000
POSTHOOK: query: SELECT * FROM view11
POSTHOOK: type: QUERY
POSTHOOK: Input: default@table1
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-05_325_5076134622461852994/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-15_608_2737310037483137245/10000
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
1
2
3
@@ -668,78 +703,85 @@
CREATE VIEW view12 AS
SELECT * FROM src LATERAL VIEW explode(array(1,2,3)) myTable AS myCol
PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-09_967_36344172947660792/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-18_512_6883964919847548409/10000
POSTHOOK: query: -- test usage of LATERAL within a view
CREATE VIEW view12 AS
SELECT * FROM src LATERAL VIEW explode(array(1,2,3)) myTable AS myCol
POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-09_967_36344172947660792/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-18_512_6883964919847548409/10000
POSTHOOK: Output: default@view12
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DESCRIBE EXTENDED view12
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE EXTENDED view12
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
key string
value string
mycol int
-Detailed Table Information Table(tableName:view12, dbName:default, owner:jsichi, createTime:1269031270, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null), FieldSchema(name:mycol, type:int, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031270}, viewOriginalText:SELECT * FROM src LATERAL VIEW explode(array(1,2,3)) myTable AS myCol, viewExpandedText:SELECT `src`.`key`, `src`.`value`, `mytable`.`mycol` FROM `src` LATERAL VIEW explode(array(1,2,3)) `myTable` AS `myCol`, tableType:VIRTUAL_VIEW)
+Detailed Table Information Table(tableName:view12, dbName:default, owner:athusoo, createTime:1269537078, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null), FieldSchema(name:mycol, type:int, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537078}, viewOriginalText:SELECT * FROM src LATERAL VIEW explode(array(1,2,3)) myTable AS myCol, viewExpandedText:SELECT `src`.`key`, `src`.`value`, `mytable`.`mycol` FROM `src` LATERAL VIEW explode(array(1,2,3)) `myTable` AS `myCol`, tableType:VIRTUAL_VIEW)
PREHOOK: query: SELECT * FROM view12
ORDER BY key ASC, myCol ASC LIMIT 1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-10_167_2032987467946796471/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-18_624_5045922517957323619/10000
POSTHOOK: query: SELECT * FROM view12
ORDER BY key ASC, myCol ASC LIMIT 1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-10_167_2032987467946796471/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-18_624_5045922517957323619/10000
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 val_0 1
PREHOOK: query: -- test usage of LATERAL with a view as the LHS
SELECT * FROM view2 LATERAL VIEW explode(array(1,2,3)) myTable AS myCol
ORDER BY key ASC, myCol ASC LIMIT 1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-17_512_7828056991204346633/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-21_803_81846600882552084/10000
POSTHOOK: query: -- test usage of LATERAL with a view as the LHS
SELECT * FROM view2 LATERAL VIEW explode(array(1,2,3)) myTable AS myCol
ORDER BY key ASC, myCol ASC LIMIT 1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-17_512_7828056991204346633/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-21_803_81846600882552084/10000
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 val_0 1
PREHOOK: query: -- test usage of TABLESAMPLE within a view
CREATE VIEW view13 AS
SELECT s.key
FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 ON key) s
PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-24_638_2064161858024785627/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-25_057_5346543358244763563/10000
POSTHOOK: query: -- test usage of TABLESAMPLE within a view
CREATE VIEW view13 AS
SELECT s.key
FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 ON key) s
POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-24_638_2064161858024785627/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-25_057_5346543358244763563/10000
POSTHOOK: Output: default@view13
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DESCRIBE EXTENDED view13
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE EXTENDED view13
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
key int
-Detailed Table Information Table(tableName:view13, dbName:default, owner:jsichi, createTime:1269031284, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031284}, viewOriginalText:SELECT s.key
+Detailed Table Information Table(tableName:view13, dbName:default, owner:athusoo, createTime:1269537085, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537085}, viewOriginalText:SELECT s.key
FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 ON key) s, viewExpandedText:SELECT `s`.`key`
FROM `srcbucket` TABLESAMPLE (BUCKET 1 OUT OF 5 ON `key`) `s`, tableType:VIRTUAL_VIEW)
PREHOOK: query: SELECT * FROM view13
ORDER BY key LIMIT 12
PREHOOK: type: QUERY
PREHOOK: Input: default@srcbucket
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-24_755_2435823932765544817/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-25_167_3822269375715651097/10000
POSTHOOK: query: SELECT * FROM view13
ORDER BY key LIMIT 12
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcbucket
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-24_755_2435823932765544817/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-25_167_3822269375715651097/10000
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0
0
0
@@ -765,7 +807,7 @@
select s4.key as key, s4.value as value from src s4 where s4.key < 10) unionsrc2
ON (unionsrc1.key = unionsrc2.key)
PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-31_473_4648874002420220224/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-28_483_8181938846615586552/10000
POSTHOOK: query: -- test usage of JOIN+UNION+AGG all within same view
CREATE VIEW view14 AS
SELECT unionsrc1.key as k1, unionsrc1.value as v1,
@@ -779,18 +821,20 @@
select s4.key as key, s4.value as value from src s4 where s4.key < 10) unionsrc2
ON (unionsrc1.key = unionsrc2.key)
POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-31_473_4648874002420220224/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-28_483_8181938846615586552/10000
POSTHOOK: Output: default@view14
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DESCRIBE EXTENDED view14
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE EXTENDED view14
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
k1 string
v1 string
k2 string
v2 string
-Detailed Table Information Table(tableName:view14, dbName:default, owner:jsichi, createTime:1269031291, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:k1, type:string, comment:null), FieldSchema(name:v1, type:string, comment:null), FieldSchema(name:k2, type:string, comment:null), FieldSchema(name:v2, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031291}, viewOriginalText:SELECT unionsrc1.key as k1, unionsrc1.value as v1,
+Detailed Table Information Table(tableName:view14, dbName:default, owner:athusoo, createTime:1269537088, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:k1, type:string, comment:null), FieldSchema(name:v1, type:string, comment:null), FieldSchema(name:k2, type:string, comment:null), FieldSchema(name:v2, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537088}, viewOriginalText:SELECT unionsrc1.key as k1, unionsrc1.value as v1,
unionsrc2.key as k2, unionsrc2.value as v2
FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
UNION ALL
@@ -813,12 +857,13 @@
ORDER BY k1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-31_822_7840854368339296957/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-28_652_9167762135780300262/10000
POSTHOOK: query: SELECT * FROM view14
ORDER BY k1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-41-31_822_7840854368339296957/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-28_652_9167762135780300262/10000
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 val_0 0 val_0
0 val_0 0 val_0
0 val_0 0 val_0
@@ -848,23 +893,25 @@
FROM src
GROUP BY key
PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-42-13_473_5955147212006822051/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-50_881_6485549141716725712/10000
POSTHOOK: query: -- test usage of GROUP BY within view
CREATE VIEW view15 AS
SELECT key,COUNT(value) AS value_count
FROM src
GROUP BY key
POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-42-13_473_5955147212006822051/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-50_881_6485549141716725712/10000
POSTHOOK: Output: default@view15
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DESCRIBE EXTENDED view15
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE EXTENDED view15
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
key string
value_count bigint
-Detailed Table Information Table(tableName:view15, dbName:default, owner:jsichi, createTime:1269031333, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value_count, type:bigint, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031333}, viewOriginalText:SELECT key,COUNT(value) AS value_count
+Detailed Table Information Table(tableName:view15, dbName:default, owner:athusoo, createTime:1269537110, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value_count, type:bigint, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537110}, viewOriginalText:SELECT key,COUNT(value) AS value_count
FROM src
GROUP BY key, viewExpandedText:SELECT `src`.`key`,COUNT(`src`.`value`) AS `value_count`
FROM `src`
@@ -874,13 +921,14 @@
LIMIT 10
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-42-13_967_6005376422473461002/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-50_992_5256916503707086995/10000
POSTHOOK: query: SELECT * FROM view15
ORDER BY value_count DESC, key
LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-42-13_967_6005376422473461002/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-50_992_5256916503707086995/10000
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
230 5
348 5
401 5
@@ -896,21 +944,23 @@
SELECT DISTINCT value
FROM src
PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-42-24_829_3812156840588170802/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-56_892_3109642529543389997/10000
POSTHOOK: query: -- test usage of DISTINCT within view
CREATE VIEW view16 AS
SELECT DISTINCT value
FROM src
POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-42-24_829_3812156840588170802/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-56_892_3109642529543389997/10000
POSTHOOK: Output: default@view16
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DESCRIBE EXTENDED view16
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE EXTENDED view16
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
value string
-Detailed Table Information Table(tableName:view16, dbName:default, owner:jsichi, createTime:1269031344, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269031344}, viewOriginalText:SELECT DISTINCT value
+Detailed Table Information Table(tableName:view16, dbName:default, owner:athusoo, createTime:1269537116, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537116}, viewOriginalText:SELECT DISTINCT value
FROM src, viewExpandedText:SELECT DISTINCT `src`.`value`
FROM `src`, tableType:VIRTUAL_VIEW)
PREHOOK: query: SELECT * FROM view16
@@ -918,13 +968,14 @@
LIMIT 10
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-42-25_030_7581080870865599104/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-57_001_5438164195458741614/10000
POSTHOOK: query: SELECT * FROM view16
ORDER BY value
LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-19_13-42-25_030_7581080870865599104/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-11-57_001_5438164195458741614/10000
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
val_0
val_10
val_100
@@ -944,95 +995,115 @@
DROP TABLE table1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@table1
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view1
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view1
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view1
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view2
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view2
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view2
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view3
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view3
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view3
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view4
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view4
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view4
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view5
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view5
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view5
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view6
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view6
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view6
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view7
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view7
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view7
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view8
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view8
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view8
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view9
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view9
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view9
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view10
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view10
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view10
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view11
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view11
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view11
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view12
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view12
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view12
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view13
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view13
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view13
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view14
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view14
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view14
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view15
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view15
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view15
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP VIEW view16
PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW view16
POSTHOOK: type: DROPVIEW
POSTHOOK: Output: default@view16
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP TEMPORARY FUNCTION test_translate
PREHOOK: type: DROPFUNCTION
POSTHOOK: query: DROP TEMPORARY FUNCTION test_translate
POSTHOOK: type: DROPFUNCTION
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP TEMPORARY FUNCTION test_max
PREHOOK: type: DROPFUNCTION
POSTHOOK: query: DROP TEMPORARY FUNCTION test_max
POSTHOOK: type: DROPFUNCTION
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP TEMPORARY FUNCTION test_explode
PREHOOK: type: DROPFUNCTION
POSTHOOK: query: DROP TEMPORARY FUNCTION test_explode
POSTHOOK: type: DROPFUNCTION
+POSTHOOK: Lineage: table1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/groupby3_noskew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby3_noskew.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby3_noskew.q.out (working copy)
@@ -159,17 +159,44 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c9 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c8 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c7 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c6 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c5 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c4 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/425193081/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-44_058_665383047379009214/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/425193081/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-44_058_665383047379009214/10000
+POSTHOOK: Lineage: dest1.c9 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c8 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c7 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c6 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c5 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c4 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
130091.0 260.182 256.10355987055016 98.0 0.0 142.92680950752379 143.06995106518903 20428.072875999995 20469.010897795586
PREHOOK: query: DROP TABLE dest1
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE dest1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c9 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c8 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c7 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c6 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c5 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c4 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/groupby4_map.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby4_map.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby4_map.q.out (working copy)
@@ -81,12 +81,14 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.key UDAF null[(src)src.null, ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/311149396/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-58_556_7020436817891899871/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/311149396/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-58_556_7020436817891899871/10000
+POSTHOOK: Lineage: dest1.key UDAF null[(src)src.null, ]
500
Index: ql/src/test/results/clientpositive/join7.q.out
===================================================================
--- ql/src/test/results/clientpositive/join7.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/join7.q.out (working copy)
@@ -268,14 +268,26 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c6 SIMPLE null[(src)src3.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c5 SIMPLE null[(src)src3.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c4 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1490522994/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-27_269_4561698636205510101/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1490522994/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-27_269_4561698636205510101/10000
+POSTHOOK: Lineage: dest1.c6 SIMPLE null[(src)src3.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c5 SIMPLE null[(src)src3.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c4 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ]
11 val_11 NULL NULL NULL NULL
12 val_12 NULL NULL NULL NULL
12 val_12 NULL NULL NULL NULL
Index: ql/src/test/results/clientpositive/rcfile_lazydecompress.q.out
===================================================================
--- ql/src/test/results/clientpositive/rcfile_lazydecompress.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/rcfile_lazydecompress.q.out (working copy)
@@ -17,14 +17,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@rcfiletablelazydecompress
+POSTHOOK: Lineage: rcfiletablelazydecompress.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT key, value FROM rcfileTableLazyDecompress where key > 238
PREHOOK: type: QUERY
PREHOOK: Input: default@rcfiletablelazydecompress
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1560474607/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-08_727_5374527120652264332/10000
POSTHOOK: query: SELECT key, value FROM rcfileTableLazyDecompress where key > 238
POSTHOOK: type: QUERY
POSTHOOK: Input: default@rcfiletablelazydecompress
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1560474607/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-08_727_5374527120652264332/10000
+POSTHOOK: Lineage: rcfiletablelazydecompress.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
311 val_311
409 val_409
255 val_255
@@ -33,22 +37,26 @@
PREHOOK: query: SELECT key, value FROM rcfileTableLazyDecompress where key > 238 and key < 400
PREHOOK: type: QUERY
PREHOOK: Input: default@rcfiletablelazydecompress
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/664979983/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-11_629_2626075330075130750/10000
POSTHOOK: query: SELECT key, value FROM rcfileTableLazyDecompress where key > 238 and key < 400
POSTHOOK: type: QUERY
POSTHOOK: Input: default@rcfiletablelazydecompress
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/664979983/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-11_629_2626075330075130750/10000
+POSTHOOK: Lineage: rcfiletablelazydecompress.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
311 val_311
255 val_255
278 val_278
PREHOOK: query: SELECT key, count(1) FROM rcfileTableLazyDecompress where key > 238 group by key
PREHOOK: type: QUERY
PREHOOK: Input: default@rcfiletablelazydecompress
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/364436641/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-14_581_8642600447413407784/10000
POSTHOOK: query: SELECT key, count(1) FROM rcfileTableLazyDecompress where key > 238 group by key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@rcfiletablelazydecompress
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/364436641/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-14_581_8642600447413407784/10000
+POSTHOOK: Lineage: rcfiletablelazydecompress.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
255 1
278 1
311 1
@@ -64,14 +72,22 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@rcfiletablelazydecompress
+POSTHOOK: Lineage: rcfiletablelazydecompress.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT key, value FROM rcfileTableLazyDecompress where key > 238
PREHOOK: type: QUERY
PREHOOK: Input: default@rcfiletablelazydecompress
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1876736453/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-20_862_5348938586517860575/10000
POSTHOOK: query: SELECT key, value FROM rcfileTableLazyDecompress where key > 238
POSTHOOK: type: QUERY
POSTHOOK: Input: default@rcfiletablelazydecompress
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1876736453/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-20_862_5348938586517860575/10000
+POSTHOOK: Lineage: rcfiletablelazydecompress.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
311 val_311
409 val_409
255 val_255
@@ -80,22 +96,30 @@
PREHOOK: query: SELECT key, value FROM rcfileTableLazyDecompress where key > 238 and key < 400
PREHOOK: type: QUERY
PREHOOK: Input: default@rcfiletablelazydecompress
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/2041315134/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-23_778_2795504010268419164/10000
POSTHOOK: query: SELECT key, value FROM rcfileTableLazyDecompress where key > 238 and key < 400
POSTHOOK: type: QUERY
POSTHOOK: Input: default@rcfiletablelazydecompress
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/2041315134/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-23_778_2795504010268419164/10000
+POSTHOOK: Lineage: rcfiletablelazydecompress.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
311 val_311
255 val_255
278 val_278
PREHOOK: query: SELECT key, count(1) FROM rcfileTableLazyDecompress where key > 238 group by key
PREHOOK: type: QUERY
PREHOOK: Input: default@rcfiletablelazydecompress
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/798009292/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-26_756_4684407126467219084/10000
POSTHOOK: query: SELECT key, count(1) FROM rcfileTableLazyDecompress where key > 238 group by key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@rcfiletablelazydecompress
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/798009292/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-26_756_4684407126467219084/10000
+POSTHOOK: Lineage: rcfiletablelazydecompress.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
255 1
278 1
311 1
@@ -106,3 +130,7 @@
POSTHOOK: query: DROP TABLE rcfileTableLazyDecompress
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@rcfiletablelazydecompress
+POSTHOOK: Lineage: rcfiletablelazydecompress.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfiletablelazydecompress.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/groupby7_map_skew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby7_map_skew.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby7_map_skew.q.out (working copy)
@@ -106,7 +106,7 @@
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/480812745/10004
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-29_621_897620491767177513/10004
Reduce Output Operator
key expressions:
expr: _col0
@@ -165,7 +165,7 @@
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/480812745/10005
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-29_621_897620491767177513/10005
Reduce Output Operator
key expressions:
expr: _col0
@@ -198,7 +198,7 @@
Stage: Stage-5
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/480812745/10006
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-29_621_897620491767177513/10006
Reduce Output Operator
key expressions:
expr: _col0
@@ -269,14 +269,22 @@
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT DEST1.* FROM DEST1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1654745587/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-42_204_6270581048257239524/10000
POSTHOOK: query: SELECT DEST1.* FROM DEST1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1654745587/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-42_204_6270581048257239524/10000
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 0.0
10 10.0
100 200.0
@@ -589,11 +597,15 @@
PREHOOK: query: SELECT DEST2.* FROM DEST2
PREHOOK: type: QUERY
PREHOOK: Input: default@dest2
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1604193796/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-42_256_1322056766908609288/10000
POSTHOOK: query: SELECT DEST2.* FROM DEST2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest2
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1604193796/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-42_256_1322056766908609288/10000
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/groupby8_noskew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby8_noskew.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby8_noskew.q.out (working copy)
@@ -83,7 +83,7 @@
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/607500899/10004
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-29_404_6433404623233532740/10004
Reduce Output Operator
key expressions:
expr: _col0
@@ -142,7 +142,7 @@
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/607500899/10005
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-29_404_6433404623233532740/10005
Reduce Output Operator
key expressions:
expr: _col0
@@ -213,14 +213,22 @@
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT DEST1.* FROM DEST1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/10787028/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-38_837_252502579925777304/10000
POSTHOOK: query: SELECT DEST1.* FROM DEST1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/10787028/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-38_837_252502579925777304/10000
+POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 1
10 1
100 1
@@ -533,11 +541,15 @@
PREHOOK: query: SELECT DEST2.* FROM DEST2
PREHOOK: type: QUERY
PREHOOK: Input: default@dest2
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1669371156/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-38_884_7871847228789574490/10000
POSTHOOK: query: SELECT DEST2.* FROM DEST2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest2
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1669371156/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-38_884_7871847228789574490/10000
+POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 1
10 1
100 1
Index: ql/src/test/results/clientpositive/input33.q.out
===================================================================
--- ql/src/test/results/clientpositive/input33.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/input33.q.out (working copy)
@@ -140,14 +140,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT * FROM dest1 SORT BY key, value
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/643220558/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-22-57_665_6986659847610434025/10000
POSTHOOK: query: SELECT * FROM dest1 SORT BY key, value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/643220558/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-22-57_665_6986659847610434025/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
1 105_105
1 10_10
1 111_111
Index: ql/src/test/results/clientpositive/bucket1.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucket1.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/bucket1.q.out (working copy)
@@ -49,9 +49,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src [src]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src [src]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
Partition
base file name: src
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -62,12 +62,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
name src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1267129667
+ transient_lastDdlTime 1269536297
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -78,12 +78,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
name src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1267129667
+ transient_lastDdlTime 1269536297
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: src
name: src
@@ -99,7 +99,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_12-27-48_091_9013372046421498540/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-17_872_2671324394738779888/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -111,12 +111,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucket1_1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucket1_1
name bucket1_1
serialization.ddl struct bucket1_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1267129668
+ transient_lastDdlTime 1269536297
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucket1_1
TotalFiles: 1
@@ -126,7 +126,7 @@
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_12-27-48_091_9013372046421498540/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-17_872_2671324394738779888/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -137,15 +137,15 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucket1_1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucket1_1
name bucket1_1
serialization.ddl struct bucket1_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1267129668
+ transient_lastDdlTime 1269536297
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucket1_1
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_12-27-48_091_9013372046421498540/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-17_872_2671324394738779888/10001
PREHOOK: query: insert overwrite table bucket1_1
@@ -158,14 +158,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@bucket1_1
+POSTHOOK: Lineage: bucket1_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket1_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: select * from bucket1_1 order by key
PREHOOK: type: QUERY
PREHOOK: Input: default@bucket1_1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_12-27-52_282_2132999711625321779/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-21_198_4347641231076554948/10000
POSTHOOK: query: select * from bucket1_1 order by key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucket1_1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_12-27-52_282_2132999711625321779/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-21_198_4347641231076554948/10000
+POSTHOOK: Lineage: bucket1_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket1_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 val_0
0 val_0
0 val_0
@@ -671,3 +675,5 @@
POSTHOOK: query: drop table bucket1_1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucket1_1
+POSTHOOK: Lineage: bucket1_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket1_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/groupby1_map.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby1_map.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby1_map.q.out (working copy)
@@ -103,14 +103,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/233901356/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-16_617_4831671133728037370/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/233901356/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-16_617_4831671133728037370/10000
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/quote1.q.out
===================================================================
--- ql/src/test/results/clientpositive/quote1.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/quote1.q.out (working copy)
@@ -66,7 +66,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/278101855/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-48-33_826_234177382876759062/10000
Stage: Stage-0
Move Operator
@@ -83,7 +83,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1002066890/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-48-33_826_234177382876759062/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -166,14 +166,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1@table=2008-04-08
+POSTHOOK: Lineage: dest1 PARTITION(table=2008-04-08).type SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(table=2008-04-08).location SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT `int`.`location`, `int`.`type`, `int`.`table` FROM dest1 `int` WHERE `int`.`table` = '2008-04-08'
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1@table=2008-04-08
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1815962646/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-48-37_611_1619540330154848144/10000
POSTHOOK: query: SELECT `int`.`location`, `int`.`type`, `int`.`table` FROM dest1 `int` WHERE `int`.`table` = '2008-04-08'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1@table=2008-04-08
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1815962646/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-48-37_611_1619540330154848144/10000
+POSTHOOK: Lineage: dest1 PARTITION(table=2008-04-08).type SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(table=2008-04-08).location SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
238 val_238 2008-04-08
255 val_255 2008-04-08
278 val_278 2008-04-08
Index: ql/src/test/results/clientpositive/notable_alias2.q.out
===================================================================
--- ql/src/test/results/clientpositive/notable_alias2.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/notable_alias2.q.out (working copy)
@@ -117,14 +117,20 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.null, ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.dummy SIMPLE null[]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1079706949/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-42-52_731_2657853465196118929/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1079706949/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-42-52_731_2657853465196118929/10000
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.null, ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.dummy SIMPLE null[]
1234 0 3.0
1234 10 1.0
1234 11 1.0
Index: ql/src/test/results/clientpositive/join2.q.out
===================================================================
--- ql/src/test/results/clientpositive/join2.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/join2.q.out (working copy)
@@ -153,14 +153,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest_j2
+POSTHOOK: Lineage: dest_j2.value SIMPLE null[(src)src3.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j2.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest_j2.* FROM dest_j2
PREHOOK: type: QUERY
PREHOOK: Input: default@dest_j2
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_3/build/ql/scratchdir/hive_2010-02-12_22-17-58_450_8546758638899574985/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-30-17_086_3181239128748934060/10000
POSTHOOK: query: SELECT dest_j2.* FROM dest_j2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest_j2
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_3/build/ql/scratchdir/hive_2010-02-12_22-17-58_450_8546758638899574985/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-30-17_086_3181239128748934060/10000
+POSTHOOK: Lineage: dest_j2.value SIMPLE null[(src)src3.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j2.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ]
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out
===================================================================
--- ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out (working copy)
@@ -91,14 +91,24 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_thrift
POSTHOOK: Output: default@columnarserde_create_shortcut
+POSTHOOK: Lineage: columnarserde_create_shortcut.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
PREHOOK: query: SELECT columnarserde_create_shortcut.* FROM columnarserde_create_shortcut DISTRIBUTE BY 1
PREHOOK: type: QUERY
PREHOOK: Input: default@columnarserde_create_shortcut
-PREHOOK: Output: file:/data/users/heyongqiang/trunk/VENDOR.hive/trunk/.ptest_1/build/ql/tmp/1405876765/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-02_465_3808350360366374647/10000
POSTHOOK: query: SELECT columnarserde_create_shortcut.* FROM columnarserde_create_shortcut DISTRIBUTE BY 1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@columnarserde_create_shortcut
-POSTHOOK: Output: file:/data/users/heyongqiang/trunk/VENDOR.hive/trunk/.ptest_1/build/ql/tmp/1405876765/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-02_465_3808350360366374647/10000
+POSTHOOK: Lineage: columnarserde_create_shortcut.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
[0,0,0] ["0","0","0"] {"key_0":"value_0"} 1712634731 record_0
[1,2,3] ["10","100","1000"] {"key_1":"value_1"} 465985200 record_1
[2,4,6] ["20","200","2000"] {"key_2":"value_2"} -751827638 record_2
@@ -113,11 +123,16 @@
PREHOOK: query: SELECT columnarserde_create_shortcut.a[0], columnarserde_create_shortcut.b[0], columnarserde_create_shortcut.c['key2'], columnarserde_create_shortcut.d, columnarserde_create_shortcut.e FROM columnarserde_create_shortcut DISTRIBUTE BY 1
PREHOOK: type: QUERY
PREHOOK: Input: default@columnarserde_create_shortcut
-PREHOOK: Output: file:/data/users/heyongqiang/trunk/VENDOR.hive/trunk/.ptest_1/build/ql/tmp/108106790/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-05_512_6561312538515091177/10000
POSTHOOK: query: SELECT columnarserde_create_shortcut.a[0], columnarserde_create_shortcut.b[0], columnarserde_create_shortcut.c['key2'], columnarserde_create_shortcut.d, columnarserde_create_shortcut.e FROM columnarserde_create_shortcut DISTRIBUTE BY 1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@columnarserde_create_shortcut
-POSTHOOK: Output: file:/data/users/heyongqiang/trunk/VENDOR.hive/trunk/.ptest_1/build/ql/tmp/108106790/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-05_512_6561312538515091177/10000
+POSTHOOK: Lineage: columnarserde_create_shortcut.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
0 0 NULL 1712634731 record_0
1 10 NULL 465985200 record_1
2 20 NULL -751827638 record_2
@@ -134,15 +149,30 @@
POSTHOOK: query: drop table columnarserde_create_shortcut
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@columnarserde_create_shortcut
+POSTHOOK: Lineage: columnarserde_create_shortcut.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
PREHOOK: query: DROP TABLE columnShortcutTable
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE columnShortcutTable
POSTHOOK: type: DROPTABLE
+POSTHOOK: Lineage: columnarserde_create_shortcut.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
PREHOOK: query: CREATE table columnShortcutTable (key STRING, value STRING) STORED AS RCFILE
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE table columnShortcutTable (key STRING, value STRING) STORED AS RCFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@columnShortcutTable
+POSTHOOK: Lineage: columnarserde_create_shortcut.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
PREHOOK: query: FROM src INSERT OVERWRITE TABLE columnShortcutTable SELECT src.key, src.value LIMIT 10
PREHOOK: type: QUERY
@@ -153,20 +183,41 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@columnshortcuttable
+POSTHOOK: Lineage: columnshortcuttable.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: columnshortcuttable.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
PREHOOK: query: describe columnShortcutTable
PREHOOK: type: DESCTABLE
POSTHOOK: query: describe columnShortcutTable
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: columnshortcuttable.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: columnshortcuttable.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
key string from deserializer
value string from deserializer
PREHOOK: query: SELECT columnShortcutTable.* FROM columnShortcutTable
PREHOOK: type: QUERY
PREHOOK: Input: default@columnshortcuttable
-PREHOOK: Output: file:/data/users/heyongqiang/trunk/VENDOR.hive/trunk/.ptest_1/build/ql/tmp/33712646/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-11_908_8954960864638919337/10000
POSTHOOK: query: SELECT columnShortcutTable.* FROM columnShortcutTable
POSTHOOK: type: QUERY
POSTHOOK: Input: default@columnshortcuttable
-POSTHOOK: Output: file:/data/users/heyongqiang/trunk/VENDOR.hive/trunk/.ptest_1/build/ql/tmp/33712646/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-11_908_8954960864638919337/10000
+POSTHOOK: Lineage: columnshortcuttable.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: columnshortcuttable.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
238 val_238
86 val_86
311 val_311
@@ -183,14 +234,28 @@
POSTHOOK: type: ALTERTABLE_ADDCOLS
POSTHOOK: Input: default@columnshortcuttable
POSTHOOK: Output: default@columnshortcuttable
+POSTHOOK: Lineage: columnshortcuttable.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: columnshortcuttable.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
PREHOOK: query: SELECT columnShortcutTable.* FROM columnShortcutTable
PREHOOK: type: QUERY
PREHOOK: Input: default@columnshortcuttable
-PREHOOK: Output: file:/data/users/heyongqiang/trunk/VENDOR.hive/trunk/.ptest_1/build/ql/tmp/137255855/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-11_998_4570524913573054432/10000
POSTHOOK: query: SELECT columnShortcutTable.* FROM columnShortcutTable
POSTHOOK: type: QUERY
POSTHOOK: Input: default@columnshortcuttable
-POSTHOOK: Output: file:/data/users/heyongqiang/trunk/VENDOR.hive/trunk/.ptest_1/build/ql/tmp/137255855/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-11_998_4570524913573054432/10000
+POSTHOOK: Lineage: columnshortcuttable.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: columnshortcuttable.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
238 val_238 NULL
86 val_86 NULL
311 val_311 NULL
@@ -207,14 +272,28 @@
POSTHOOK: type: ALTERTABLE_REPLACECOLS
POSTHOOK: Input: default@columnshortcuttable
POSTHOOK: Output: default@columnshortcuttable
+POSTHOOK: Lineage: columnshortcuttable.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: columnshortcuttable.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
PREHOOK: query: SELECT columnShortcutTable.* FROM columnShortcutTable
PREHOOK: type: QUERY
PREHOOK: Input: default@columnshortcuttable
-PREHOOK: Output: file:/data/users/heyongqiang/trunk/VENDOR.hive/trunk/.ptest_1/build/ql/tmp/1910249486/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-12_089_5035284270003188253/10000
POSTHOOK: query: SELECT columnShortcutTable.* FROM columnShortcutTable
POSTHOOK: type: QUERY
POSTHOOK: Input: default@columnshortcuttable
-POSTHOOK: Output: file:/data/users/heyongqiang/trunk/VENDOR.hive/trunk/.ptest_1/build/ql/tmp/1910249486/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-12_089_5035284270003188253/10000
+POSTHOOK: Lineage: columnshortcuttable.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: columnshortcuttable.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
238
86
311
@@ -230,3 +309,10 @@
POSTHOOK: query: DROP TABLE columnShortcutTable
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@columnshortcuttable
+POSTHOOK: Lineage: columnshortcuttable.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: columnshortcuttable.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: columnarserde_create_shortcut.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
Index: ql/src/test/results/clientpositive/input7.q.out
===================================================================
--- ql/src/test/results/clientpositive/input7.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/input7.q.out (working copy)
@@ -58,7 +58,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/425177552/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-13_872_8620045661731245027/10000
Stage: Stage-0
Move Operator
@@ -73,7 +73,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1768330966/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-13_872_8620045661731245027/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -107,14 +107,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src1
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c2 SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/56485501/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-17_149_8495124390051069052/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/56485501/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-17_149_8495124390051069052/10000
+POSTHOOK: Lineage: dest1.c2 SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
NULL 238
NULL NULL
NULL 311
Index: ql/src/test/results/clientpositive/groupby5_map_skew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby5_map_skew.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby5_map_skew.q.out (working copy)
@@ -85,12 +85,14 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.key UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1364605080/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-29_961_955470938988654835/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1364605080/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-29_961_955470938988654835/10000
+POSTHOOK: Lineage: dest1.key UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
130091
Index: ql/src/test/results/clientpositive/udf5.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf5.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/udf5.q.out (working copy)
@@ -11,12 +11,14 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
PREHOOK: query: EXPLAIN SELECT from_unixtime(1226446340), to_date(from_unixtime(1226446340)), day('2008-11-01'), month('2008-11-01'), year('2008-11-01'), day('2008-11-01 15:32:20'), month('2008-11-01 15:32:20'), year('2008-11-01 15:32:20') FROM dest1
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN SELECT from_unixtime(1226446340), to_date(from_unixtime(1226446340)), day('2008-11-01'), month('2008-11-01'), year('2008-11-01'), day('2008-11-01 15:32:20'), month('2008-11-01 15:32:20'), year('2008-11-01 15:32:20') FROM dest1
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION from_unixtime 1226446340)) (TOK_SELEXPR (TOK_FUNCTION to_date (TOK_FUNCTION from_unixtime 1226446340))) (TOK_SELEXPR (TOK_FUNCTION day '2008-11-01')) (TOK_SELEXPR (TOK_FUNCTION month '2008-11-01')) (TOK_SELEXPR (TOK_FUNCTION year '2008-11-01')) (TOK_SELEXPR (TOK_FUNCTION day '2008-11-01 15:32:20')) (TOK_SELEXPR (TOK_FUNCTION month '2008-11-01 15:32:20')) (TOK_SELEXPR (TOK_FUNCTION year '2008-11-01 15:32:20')))))
@@ -65,11 +67,12 @@
PREHOOK: query: SELECT from_unixtime(1226446340), to_date(from_unixtime(1226446340)), day('2008-11-01'), month('2008-11-01'), year('2008-11-01'), day('2008-11-01 15:32:20'), month('2008-11-01 15:32:20'), year('2008-11-01 15:32:20') FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/1960950507/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-20_525_5646931256400793333/10000
POSTHOOK: query: SELECT from_unixtime(1226446340), to_date(from_unixtime(1226446340)), day('2008-11-01'), month('2008-11-01'), year('2008-11-01'), day('2008-11-01 15:32:20'), month('2008-11-01 15:32:20'), year('2008-11-01 15:32:20') FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/1960950507/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-20_525_5646931256400793333/10000
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
2008-11-11 15:32:20 2008-11-11 1 11 2008 1 11 2008
PREHOOK: query: EXPLAIN SELECT from_unixtime(unix_timestamp('2010-01-13 11:57:40', 'yyyy-MM-dd HH:mm:ss'), 'MM/dd/yy HH:mm:ss') from dest1
@@ -77,6 +80,7 @@
POSTHOOK: query: EXPLAIN SELECT from_unixtime(unix_timestamp('2010-01-13 11:57:40', 'yyyy-MM-dd HH:mm:ss'), 'MM/dd/yy HH:mm:ss') from dest1
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION from_unixtime (TOK_FUNCTION unix_timestamp '2010-01-13 11:57:40' 'yyyy-MM-dd HH:mm:ss') 'MM/dd/yy HH:mm:ss')))))
@@ -111,9 +115,10 @@
PREHOOK: query: SELECT from_unixtime(unix_timestamp('2010-01-13 11:57:40', 'yyyy-MM-dd HH:mm:ss'), 'MM/dd/yy HH:mm:ss') from dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/1373998585/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-23_650_4064144427983139647/10000
POSTHOOK: query: SELECT from_unixtime(unix_timestamp('2010-01-13 11:57:40', 'yyyy-MM-dd HH:mm:ss'), 'MM/dd/yy HH:mm:ss') from dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/nzhang/work/876/apache-hive/build/ql/tmp/1373998585/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-23_650_4064144427983139647/10000
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
01/13/10 11:57:40
Index: ql/src/test/results/clientpositive/groupby_map_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_map_ppr.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby_map_ppr.q.out (working copy)
@@ -78,10 +78,10 @@
type: double
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [src]
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [src]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [src]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [src]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -95,13 +95,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450232
+ transient_lastDdlTime 1269537562
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -112,17 +112,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450232
+ transient_lastDdlTime 1269537562
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
name: srcpart
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -136,13 +136,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450232
+ transient_lastDdlTime 1269537562
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -153,13 +153,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450232
+ transient_lastDdlTime 1269537562
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
name: srcpart
@@ -195,7 +195,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-43-55_309_3436009791783298747/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-19-24_407_3273654117101516787/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -206,12 +206,12 @@
columns.types string:int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { string key, i32 c1, string c2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450235
+ transient_lastDdlTime 1269537564
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
TotalFiles: 1
@@ -221,7 +221,7 @@
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-43-55_309_3436009791783298747/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-19-24_407_3273654117101516787/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -231,15 +231,15 @@
columns.types string:int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { string key, i32 c1, string c2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266450235
+ transient_lastDdlTime 1269537564
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-43-55_309_3436009791783298747/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-19-24_407_3273654117101516787/10001
PREHOOK: query: FROM srcpart src
@@ -260,14 +260,20 @@
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c2 UDAF null[(srcpart)src.FieldSchema(name:ds, type:string, comment:null), (srcpart)src.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.c1 UDAF null[(srcpart)src.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)src.FieldSchema(name:ds, type:string, comment:null), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-44-01_005_4243408771320508674/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-19-29_469_7527101305543716691/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-44-01_005_4243408771320508674/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-19-29_469_7527101305543716691/10000
+POSTHOOK: Lineage: dest1.c2 UDAF null[(srcpart)src.FieldSchema(name:ds, type:string, comment:null), (srcpart)src.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.c1 UDAF null[(srcpart)src.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)src.FieldSchema(name:ds, type:string, comment:null), ]
0 1 00.0
1 71 132828.0
2 69 251142.0
Index: ql/src/test/results/clientpositive/create_like.q.out
===================================================================
--- ql/src/test/results/clientpositive/create_like.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/create_like.q.out (working copy)
@@ -28,7 +28,7 @@
a string
b string
-Detailed Table Information Table(tableName:table1, dbName:default, owner:nzhang, createTime:1254243861, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/nzhang/work/31/apache-hive-trunk/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{})
+Detailed Table Information Table(tableName:table1, dbName:default, owner:athusoo, createTime:1269537000, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269537000}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: CREATE TABLE table2 LIKE table1
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE TABLE table2 LIKE table1
@@ -47,7 +47,7 @@
a string
b string
-Detailed Table Information Table(tableName:table2, dbName:default, owner:nzhang, createTime:1254243861, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/nzhang/work/31/apache-hive-trunk/build/ql/test/data/warehouse/table2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE})
+Detailed Table Information Table(tableName:table2, dbName:default, owner:athusoo, createTime:1269537001, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/table2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE,transient_lastDdlTime=1269537001}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: CREATE TABLE IF NOT EXISTS table2 LIKE table1
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE TABLE IF NOT EXISTS table2 LIKE table1
@@ -74,7 +74,7 @@
a string
b string
-Detailed Table Information Table(tableName:table3, dbName:default, owner:nzhang, createTime:1254243861, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/nzhang/work/31/apache-hive-trunk/build/ql/test/data/warehouse/table3, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=TRUE})
+Detailed Table Information Table(tableName:table3, dbName:default, owner:athusoo, createTime:1269537001, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/table3, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{EXTERNAL=TRUE,transient_lastDdlTime=1269537001}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: INSERT OVERWRITE TABLE table1 SELECT key, value FROM src WHERE key = 86
PREHOOK: type: QUERY
PREHOOK: Input: default@src
@@ -83,6 +83,8 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@table1
+POSTHOOK: Lineage: table1.b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table1.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: INSERT OVERWRITE TABLE table2 SELECT key, value FROM src WHERE key = 100
PREHOOK: type: QUERY
PREHOOK: Input: default@src
@@ -91,23 +93,35 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@table2
+POSTHOOK: Lineage: table2.b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table2.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table1.b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table1.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT * FROM table1
PREHOOK: type: QUERY
PREHOOK: Input: default@table1
-PREHOOK: Output: file:/data/users/nzhang/work/31/apache-hive-trunk/build/ql/tmp/1104038861/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-08_405_2508506003941131709/10000
POSTHOOK: query: SELECT * FROM table1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@table1
-POSTHOOK: Output: file:/data/users/nzhang/work/31/apache-hive-trunk/build/ql/tmp/1104038861/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-08_405_2508506003941131709/10000
+POSTHOOK: Lineage: table2.b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table2.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table1.b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table1.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
86 val_86
PREHOOK: query: SELECT * FROM table2
PREHOOK: type: QUERY
PREHOOK: Input: default@table2
-PREHOOK: Output: file:/data/users/nzhang/work/31/apache-hive-trunk/build/ql/tmp/1673347492/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-08_457_2686684369720094791/10000
POSTHOOK: query: SELECT * FROM table2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@table2
-POSTHOOK: Output: file:/data/users/nzhang/work/31/apache-hive-trunk/build/ql/tmp/1673347492/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-08_457_2686684369720094791/10000
+POSTHOOK: Lineage: table2.b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table2.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table1.b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table1.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
100 val_100
100 val_100
PREHOOK: query: DROP TABLE table1
@@ -115,13 +129,25 @@
POSTHOOK: query: DROP TABLE table1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@table1
+POSTHOOK: Lineage: table2.b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table2.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table1.b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table1.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP TABLE table2
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE table2
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@table2
+POSTHOOK: Lineage: table2.b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table2.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table1.b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table1.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP TABLE table3
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE table3
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@table3
+POSTHOOK: Lineage: table2.b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table2.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table1.b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table1.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/udf_concat_insert1.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_concat_insert1.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/udf_concat_insert1.q.out (working copy)
@@ -13,14 +13,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/pyang/trunk/VENDOR.hive/trunk/build/ql/tmp/65445395/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-13-10_037_7903354701066658907/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/pyang/trunk/VENDOR.hive/trunk/build/ql/tmp/65445395/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-13-10_037_7903354701066658907/10000
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[]
1234 0
1234 10
1234 11
@@ -83,3 +87,5 @@
POSTHOOK: query: DROP TABLE dest1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[]
Index: ql/src/test/results/clientpositive/join39.q.out
===================================================================
--- ql/src/test/results/clientpositive/join39.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/join39.q.out (working copy)
@@ -19,14 +19,22 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.key1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)x.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)x.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: select * from dest_j1 x order by x.key
PREHOOK: type: QUERY
PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/data/users/njain/deploy/deploy1/trunk/VENDOR.hive/trunk/build/ql/tmp/1250456003/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-34-11_801_8349690412767770076/10000
POSTHOOK: query: select * from dest_j1 x order by x.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/data/users/njain/deploy/deploy1/trunk/VENDOR.hive/trunk/build/ql/tmp/1250456003/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-34-11_801_8349690412767770076/10000
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.key1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)x.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)x.FieldSchema(name:key, type:string, comment:default), ]
0 val_0 0 val_0
0 val_0 0 val_0
0 val_0 0 val_0
@@ -598,3 +606,7 @@
POSTHOOK: query: drop table dest_j1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.key1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)x.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)x.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/groupby5.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby5.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby5.q.out (working copy)
@@ -69,7 +69,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/711771814/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-12_300_764108028245873958/10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -140,14 +140,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/673926321/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-18_649_3065740352765144063/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/673926321/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-18_649_3065740352765144063/10000
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/input11_limit.q.out
===================================================================
--- ql/src/test/results/clientpositive/input11_limit.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/input11_limit.q.out (working copy)
@@ -89,14 +89,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/343123214/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-12_877_3085351605931647025/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/343123214/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-12_877_3085351605931647025/10000
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
86 val_86
27 val_27
98 val_98
Index: ql/src/test/results/clientpositive/sample4.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample4.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/sample4.q.out (working copy)
@@ -52,7 +52,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-12_986_8291091704728174599/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-05_030_1520666124282223634/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -63,21 +63,21 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452772
+ transient_lastDdlTime 1269539525
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
TotalFiles: 1
MultiFileSpray: false
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt
Partition
base file name: srcbucket0.txt
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -89,12 +89,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket
name srcbucket
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452771
+ transient_lastDdlTime 1269539523
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -106,12 +106,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket
name srcbucket
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452771
+ transient_lastDdlTime 1269539523
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket
name: srcbucket
@@ -123,14 +123,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-12_986_8291091704728174599/10002
- destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-12_986_8291091704728174599/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-05_030_1520666124282223634/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-05_030_1520666124282223634/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-12_986_8291091704728174599/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-05_030_1520666124282223634/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -140,20 +140,20 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452772
+ transient_lastDdlTime 1269539525
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-12_986_8291091704728174599/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-05_030_1520666124282223634/10001
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-12_986_8291091704728174599/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-05_030_1520666124282223634/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -167,9 +167,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-12_986_8291091704728174599/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-12_986_8291091704728174599/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-05_030_1520666124282223634/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-05_030_1520666124282223634/10002]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-12_986_8291091704728174599/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-05_030_1520666124282223634/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -180,12 +180,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452772
+ transient_lastDdlTime 1269539525
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -196,12 +196,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452772
+ transient_lastDdlTime 1269539525
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
name: dest1
@@ -210,7 +210,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-12_986_8291091704728174599/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-05_030_1520666124282223634/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -221,12 +221,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452772
+ transient_lastDdlTime 1269539525
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
TotalFiles: 1
@@ -243,14 +243,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcbucket
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-16_978_5748485007757864278/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-08_479_2614536684056111351/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-16_978_5748485007757864278/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-08_479_2614536684056111351/10000
+POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ]
474 val_475
62 val_63
468 val_469
Index: ql/src/test/results/clientpositive/join25.q.out
===================================================================
--- ql/src/test/results/clientpositive/join25.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/join25.q.out (working copy)
@@ -146,7 +146,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/664166488/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-07_631_8636464587198651302/10000
Stage: Stage-0
Move Operator
@@ -161,7 +161,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1866486006/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-07_631_8636464587198651302/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -201,14 +201,20 @@
POSTHOOK: Input: default@src
POSTHOOK: Input: default@src1
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: select * from dest_j1 x order by x.key
PREHOOK: type: QUERY
PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/463646390/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-11_366_8012497285275232465/10000
POSTHOOK: query: select * from dest_j1 x order by x.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/463646390/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-11_366_8012497285275232465/10000
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
66 val_66 val_66
98 val_98 val_98
98 val_98 val_98
@@ -251,3 +257,6 @@
POSTHOOK: query: drop table dest_j1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/smb_mapjoin_6.q.out
===================================================================
--- ql/src/test/results/clientpositive/smb_mapjoin_6.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/smb_mapjoin_6.q.out (working copy)
@@ -36,6 +36,8 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@smb_bucket4_1
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: insert overwrite table smb_bucket4_2 select * from src
PREHOOK: type: QUERY
@@ -46,6 +48,10 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@smb_bucket4_2
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: explain
insert overwrite table smb_join_results
select /*+mapjoin(a)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key
PREHOOK: type: QUERY
@@ -54,6 +60,10 @@
insert overwrite table smb_join_results
select /*+mapjoin(a)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF smb_bucket4_1 a) (TOK_TABREF smb_bucket4_2 b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB smb_join_results)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a))) (TOK_SELEXPR TOK_ALLCOLREF))))
@@ -134,14 +144,30 @@
POSTHOOK: Input: default@smb_bucket4_2
POSTHOOK: Input: default@smb_bucket4_1
POSTHOOK: Output: default@smb_join_results
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: select * from smb_join_results order by k1
PREHOOK: type: QUERY
PREHOOK: Input: default@smb_join_results
-PREHOOK: Output: file:/data/users/njain/hive_commit2/build/ql/scratchdir/hive_2010-03-23_15-35-23_638_3207464793867439237/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-07-03_455_2348447858096968436/10000
POSTHOOK: query: select * from smb_join_results order by k1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@smb_join_results
-POSTHOOK: Output: file:/data/users/njain/hive_commit2/build/ql/scratchdir/hive_2010-03-23_15-35-23_638_3207464793867439237/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-07-03_455_2348447858096968436/10000
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 val_0 0 val_0
0 val_0 0 val_0
0 val_0 0 val_0
@@ -1180,23 +1206,59 @@
POSTHOOK: Input: default@smb_bucket4_2
POSTHOOK: Input: default@smb_bucket4_1
POSTHOOK: Output: default@normal_join_results
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from normal_join_results
PREHOOK: type: QUERY
PREHOOK: Input: default@normal_join_results
-PREHOOK: Output: file:/data/users/njain/hive_commit2/build/ql/scratchdir/hive_2010-03-23_15-35-31_822_6925000698525164292/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-07-10_837_3442622441708372562/10000
POSTHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from normal_join_results
POSTHOOK: type: QUERY
POSTHOOK: Input: default@normal_join_results
-POSTHOOK: Output: file:/data/users/njain/hive_commit2/build/ql/scratchdir/hive_2010-03-23_15-35-31_822_6925000698525164292/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-07-10_837_3442622441708372562/10000
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
278697 278697 101852390308 101852390308
PREHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results
PREHOOK: type: QUERY
PREHOOK: Input: default@smb_join_results
-PREHOOK: Output: file:/data/users/njain/hive_commit2/build/ql/scratchdir/hive_2010-03-23_15-35-36_044_2094101145793654521/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-07-14_041_6606081988318306069/10000
POSTHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results
POSTHOOK: type: QUERY
POSTHOOK: Input: default@smb_join_results
-POSTHOOK: Output: file:/data/users/njain/hive_commit2/build/ql/scratchdir/hive_2010-03-23_15-35-36_044_2094101145793654521/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-07-14_041_6606081988318306069/10000
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
278697 278697 101852390308 101852390308
PREHOOK: query: explain insert overwrite table smb_join_results
@@ -1206,6 +1268,18 @@
insert overwrite table smb_join_results
select /*+mapjoin(b)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF smb_bucket4_1 a) (TOK_TABREF smb_bucket4_2 b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB smb_join_results)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST b))) (TOK_SELEXPR TOK_ALLCOLREF))))
@@ -1286,6 +1360,22 @@
POSTHOOK: Input: default@smb_bucket4_2
POSTHOOK: Input: default@smb_bucket4_1
POSTHOOK: Output: default@smb_join_results
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
PREHOOK: query: insert overwrite table smb_join_results
select /*+mapjoin(a)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key
PREHOOK: type: QUERY
@@ -1298,14 +1388,54 @@
POSTHOOK: Input: default@smb_bucket4_2
POSTHOOK: Input: default@smb_bucket4_1
POSTHOOK: Output: default@smb_join_results
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1
SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from smb_join_results order by k1 PREHOOK: type: QUERY PREHOOK: Input: default@smb_join_results -PREHOOK: Output: file:/data/users/njain/hive_commit2/build/ql/scratchdir/hive_2010-03-23_15-35-45_314_3505330141876651485/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-07-23_980_3962562026090752166/10000 POSTHOOK: query: select * from smb_join_results order by k1 POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_join_results -POSTHOOK: Output: file:/data/users/njain/hive_commit2/build/ql/scratchdir/hive_2010-03-23_15-35-45_314_3505330141876651485/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-07-23_980_3962562026090752166/10000 +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: 
smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 @@ -2344,23 +2474,95 @@ POSTHOOK: Input: default@smb_bucket4_2 POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@normal_join_results +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), 
] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from normal_join_results PREHOOK: type: QUERY PREHOOK: Input: default@normal_join_results -PREHOOK: Output: file:/data/users/njain/hive_commit2/build/ql/scratchdir/hive_2010-03-23_15-35-52_285_1158676300848339812/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-07-31_286_5677790032131450804/10000 POSTHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from normal_join_results POSTHOOK: type: QUERY POSTHOOK: Input: default@normal_join_results -POSTHOOK: Output: file:/data/users/njain/hive_commit2/build/ql/scratchdir/hive_2010-03-23_15-35-52_285_1158676300848339812/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-07-31_286_5677790032131450804/10000 +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE 
null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] 278697 278697 101852390308 101852390308 PREHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results PREHOOK: type: QUERY PREHOOK: Input: default@smb_join_results -PREHOOK: Output: file:/data/users/njain/hive_commit2/build/ql/scratchdir/hive_2010-03-23_15-35-55_185_720418110923506665/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-07-34_506_8558555703153832453/10000 POSTHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_join_results -POSTHOOK: Output: file:/data/users/njain/hive_commit2/build/ql/scratchdir/hive_2010-03-23_15-35-55_185_720418110923506665/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-07-34_506_8558555703153832453/10000 +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE 
null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] 278697 278697 101852390308 101852390308 PREHOOK: query: explain insert overwrite table smb_join_results @@ -2370,6 +2572,30 @@ insert overwrite table smb_join_results select /*+mapjoin(a)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key where a.key>1000 POSTHOOK: type: QUERY +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE 
null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF smb_bucket4_1 a) (TOK_TABREF smb_bucket4_2 b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB smb_join_results)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a))) (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL a) key) 1000)))) @@ -2454,6 +2680,34 @@ POSTHOOK: Input: default@smb_bucket4_2 POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@smb_join_results +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE 
null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: explain insert overwrite table smb_join_results select /*+mapjoin(b)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key where a.key>1000 @@ -2462,6 +2716,34 @@ insert overwrite table smb_join_results select /*+mapjoin(b)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key where a.key>1000 POSTHOOK: type: QUERY +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, 
comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF smb_bucket4_1 a) (TOK_TABREF smb_bucket4_2 b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB smb_join_results)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST b))) (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (> (. 
(TOK_TABLE_OR_COL a) key) 1000)))) @@ -2550,12 +2832,76 @@ POSTHOOK: Input: default@smb_bucket4_2 POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@smb_join_results +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] 
+POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: explain
select /*+mapjoin(b,c)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key join smb_bucket4_2 c on b.key = c.key where a.key>1000
PREHOOK: type: QUERY
POSTHOOK: query: explain
select /*+mapjoin(b,c)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key join smb_bucket4_2 c on b.key = c.key where a.key>1000
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
ABSTRACT SYNTAX TREE:
  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_TABREF smb_bucket4_1 a) (TOK_TABREF smb_bucket4_2 b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF smb_bucket4_2 c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST b c))) (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL a) key) 1000))))
@@ -2639,29 +2985,189 @@
PREHOOK: type: QUERY
PREHOOK: Input: default@smb_bucket4_2
PREHOOK: Input: default@smb_bucket4_1
-PREHOOK: Output: file:/data/users/njain/hive_commit2/build/ql/scratchdir/hive_2010-03-23_15-36-05_384_2668760504635295425/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-07-45_743_4217702787996459087/10000
POSTHOOK: query: select /*+mapjoin(b,c)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key join smb_bucket4_2 c on b.key = c.key where a.key>1000
POSTHOOK: type: QUERY
POSTHOOK: Input: default@smb_bucket4_2
POSTHOOK: Input: default@smb_bucket4_1
-POSTHOOK: Output: file:/data/users/njain/hive_commit2/build/ql/scratchdir/hive_2010-03-23_15-36-05_384_2668760504635295425/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-07-45_743_4217702787996459087/10000
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: drop table smb_join_results
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table smb_join_results
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@smb_join_results
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: drop table normal_join_results
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table normal_join_results
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@normal_join_results
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: drop table smb_bucket4_1
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table smb_bucket4_1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@smb_bucket4_1
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] Index: ql/src/test/results/clientpositive/null_column.q.out =================================================================== --- ql/src/test/results/clientpositive/null_column.q.out (revision 927279) +++ ql/src/test/results/clientpositive/null_column.q.out (working copy) @@ -23,11 +23,11 @@ PREHOOK: query: select null, null from temp_null PREHOOK: type: QUERY PREHOOK: Input: default@temp_null -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1637717703/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-42-55_220_6538699375994347913/10000 POSTHOOK: query: select null, null from temp_null POSTHOOK: type: QUERY POSTHOOK: Input: default@temp_null -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1637717703/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-42-55_220_6538699375994347913/10000 NULL NULL NULL NULL NULL NULL @@ -47,14 +47,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@temp_null POSTHOOK: Output: default@tt +POSTHOOK: Lineage: tt.b SIMPLE null[] +POSTHOOK: Lineage: tt.a SIMPLE null[] PREHOOK: query: select * from tt PREHOOK: type: QUERY PREHOOK: Input: default@tt -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/974669501/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-43-01_154_6314348874783237740/10000 POSTHOOK: query: select * from tt POSTHOOK: type: QUERY POSTHOOK: Input: default@tt -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/974669501/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-43-01_154_6314348874783237740/10000 +POSTHOOK: Lineage: tt.b SIMPLE null[] +POSTHOOK: Lineage: tt.a SIMPLE null[] NULL NULL NULL NULL NULL NULL @@ -66,6 +70,8 @@ POSTHOOK: query: create table tt_b(a int, b string) row format serde "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe" POSTHOOK: type: CREATETABLE POSTHOOK: Output: 
default@tt_b +POSTHOOK: Lineage: tt.b SIMPLE null[] +POSTHOOK: Lineage: tt.a SIMPLE null[] PREHOOK: query: insert overwrite table tt_b select null, null from temp_null PREHOOK: type: QUERY PREHOOK: Input: default@temp_null @@ -74,14 +80,22 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@temp_null POSTHOOK: Output: default@tt_b +POSTHOOK: Lineage: tt_b.b SIMPLE null[] +POSTHOOK: Lineage: tt_b.a SIMPLE null[] +POSTHOOK: Lineage: tt.b SIMPLE null[] +POSTHOOK: Lineage: tt.a SIMPLE null[] PREHOOK: query: select * from tt_b PREHOOK: type: QUERY PREHOOK: Input: default@tt_b -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1955123010/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-43-04_259_8667418921734061805/10000 POSTHOOK: query: select * from tt_b POSTHOOK: type: QUERY POSTHOOK: Input: default@tt_b -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1955123010/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-43-04_259_8667418921734061805/10000 +POSTHOOK: Lineage: tt_b.b SIMPLE null[] +POSTHOOK: Lineage: tt_b.a SIMPLE null[] +POSTHOOK: Lineage: tt.b SIMPLE null[] +POSTHOOK: Lineage: tt.a SIMPLE null[] NULL NULL NULL NULL NULL NULL @@ -96,6 +110,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@temp_null POSTHOOK: Output: ../build/ql/test/data/warehouse/null_columns.out +POSTHOOK: Lineage: tt_b.b SIMPLE null[] +POSTHOOK: Lineage: tt_b.a SIMPLE null[] +POSTHOOK: Lineage: tt.b SIMPLE null[] +POSTHOOK: Lineage: tt.a SIMPLE null[] \N\N \N\N \N\N @@ -107,13 +125,25 @@ POSTHOOK: query: drop table tt POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@tt +POSTHOOK: Lineage: tt_b.b SIMPLE null[] +POSTHOOK: Lineage: tt_b.a SIMPLE null[] +POSTHOOK: Lineage: tt.b SIMPLE null[] +POSTHOOK: Lineage: tt.a SIMPLE null[] PREHOOK: query: drop table tt_b PREHOOK: type: DROPTABLE POSTHOOK: query: drop table tt_b POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@tt_b +POSTHOOK: Lineage: tt_b.b SIMPLE null[] +POSTHOOK: Lineage: tt_b.a SIMPLE null[] +POSTHOOK: Lineage: tt.b SIMPLE null[] +POSTHOOK: Lineage: tt.a SIMPLE null[] PREHOOK: query: drop table temp_null PREHOOK: type: DROPTABLE POSTHOOK: query: drop table temp_null POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@temp_null +POSTHOOK: Lineage: tt_b.b SIMPLE null[] +POSTHOOK: Lineage: tt_b.a SIMPLE null[] +POSTHOOK: Lineage: tt.b SIMPLE null[] +POSTHOOK: Lineage: tt.a SIMPLE null[] Index: ql/src/test/results/clientpositive/union10.q.out =================================================================== --- ql/src/test/results/clientpositive/union10.q.out (revision 927279) +++ ql/src/test/results/clientpositive/union10.q.out (working copy) @@ -84,7 +84,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/201780527/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-27-15_017_6287086376242811706/10002 Union Select Operator expressions: @@ -108,7 +108,7 @@ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable - file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/201780527/10004 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-27-15_017_6287086376242811706/10004 Union Select Operator expressions: @@ -132,7 
+132,7 @@ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable - file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/201780527/10005 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-27-15_017_6287086376242811706/10005 Union Select Operator expressions: @@ -164,7 +164,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/48152402/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-27-15_017_6287086376242811706/10000 Stage: Stage-0 Move Operator @@ -179,7 +179,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/201780527/10003 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-27-15_017_6287086376242811706/10003 Reduce Output Operator sort order: Map-reduce partition columns: @@ -301,14 +301,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@tmptable +POSTHOOK: Lineage: tmptable.value SET null[(src)s1.null, (src)s2.null, (src)s3.null, ] +POSTHOOK: Lineage: tmptable.key SET null[] PREHOOK: query: select * from tmptable x sort by x.key PREHOOK: type: QUERY PREHOOK: Input: default@tmptable -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1819535514/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-27-31_344_1143911819240145420/10000 POSTHOOK: query: select * from tmptable x sort by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@tmptable -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1819535514/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-27-31_344_1143911819240145420/10000 +POSTHOOK: Lineage: tmptable.value SET null[(src)s1.null, (src)s2.null, (src)s3.null, ] +POSTHOOK: Lineage: tmptable.key SET null[] tst1 500 tst2 500 tst3 500 @@ -317,3 +321,5 @@ POSTHOOK: query: drop table tmptable POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@tmptable +POSTHOOK: Lineage: tmptable.value SET null[(src)s1.null, (src)s2.null, (src)s3.null, ] +POSTHOOK: Lineage: tmptable.key SET null[] Index: ql/src/test/results/clientpositive/rcfile_null_value.q.out =================================================================== --- ql/src/test/results/clientpositive/rcfile_null_value.q.out (revision 927279) +++ ql/src/test/results/clientpositive/rcfile_null_value.q.out (working copy) @@ -11,14 +11,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 POSTHOOK: Output: default@src1_rc +POSTHOOK: Lineage: src1_rc.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src1_rc.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT * FROM src1_rc PREHOOK: type: QUERY PREHOOK: Input: default@src1_rc -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1917284691/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-35_196_7735701627356817024/10000 POSTHOOK: query: SELECT * FROM src1_rc POSTHOOK: type: QUERY POSTHOOK: Input: default@src1_rc -POSTHOOK: Output: 
file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1917284691/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-35_196_7735701627356817024/10000 +POSTHOOK: Lineage: src1_rc.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src1_rc.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] 238 val_238 311 val_311 @@ -49,11 +53,15 @@ POSTHOOK: query: DROP TABLE src1_rc POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@src1_rc +POSTHOOK: Lineage: src1_rc.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src1_rc.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: CREATE TABLE dest1_rc(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS RCFILE PREHOOK: type: CREATETABLE POSTHOOK: query: CREATE TABLE dest1_rc(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS RCFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1_rc +POSTHOOK: Lineage: src1_rc.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src1_rc.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: EXPLAIN FROM ( FROM @@ -84,6 +92,8 @@ ) c INSERT OVERWRITE TABLE dest1_rc SELECT c.c1, c.c2, c.c3, c.c4 POSTHOOK: type: QUERY +POSTHOOK: Lineage: src1_rc.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src1_rc.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) c2)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) key) 10) (< (. (TOK_TABLE_OR_COL src1) key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) c4)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src2) key) 15) (< (. (TOK_TABLE_OR_COL src2) key) 25))))) b) (= (. (TOK_TABLE_OR_COL a) c1) (. (TOK_TABLE_OR_COL b) c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c1) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c2) c2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c3) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1_rc)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c3)) (TOK_SELEXPR (. 
(TOK_TABLE_OR_COL c) c4))))) @@ -253,14 +263,26 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1_rc +POSTHOOK: Lineage: dest1_rc.c4 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1_rc.c3 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1_rc.c2 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1_rc.c1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src1_rc.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src1_rc.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT dest1_rc.* FROM dest1_rc PREHOOK: type: QUERY PREHOOK: Input: default@dest1_rc -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1230860778/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-39_541_7266327697809575781/10000 POSTHOOK: query: SELECT dest1_rc.* FROM dest1_rc POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1_rc -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1230860778/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-39_541_7266327697809575781/10000 +POSTHOOK: Lineage: dest1_rc.c4 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1_rc.c3 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1_rc.c2 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1_rc.c1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src1_rc.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src1_rc.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] 17 val_17 17 val_17 18 val_18 18 val_18 18 val_18 18 val_18 @@ -275,3 +297,9 @@ POSTHOOK: query: DROP TABLE dest1_rc POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest1_rc +POSTHOOK: Lineage: dest1_rc.c4 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1_rc.c3 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1_rc.c2 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1_rc.c1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src1_rc.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src1_rc.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/groupby3_map_skew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby3_map_skew.q.out (revision 927279) +++ ql/src/test/results/clientpositive/groupby3_map_skew.q.out (working copy) @@ -119,7 +119,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1498525258/10002 + 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-30_326_3751834014024695261/10002 Reduce Output Operator sort order: tag: -1 @@ -247,17 +247,44 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c9 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c8 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c7 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c6 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c5 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c4 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c3 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1521208223/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-37_942_4143207350908295519/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1521208223/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-37_942_4143207350908295519/10000 +POSTHOOK: Lineage: dest1.c9 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c8 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c7 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c6 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c5 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c4 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c3 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 130091.0 260.182 256.10355987055016 98.0 0.0 142.9268095075238 143.06995106518906 20428.072876 20469.01089779559 PREHOOK: query: DROP TABLE dest1 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE dest1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c9 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c8 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c7 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c6 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: 
Lineage: dest1.c5 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c4 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c3 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join34.q.out =================================================================== --- ql/src/test/results/clientpositive/join34.q.out (revision 927279) +++ ql/src/test/results/clientpositive/join34.q.out (working copy) @@ -95,7 +95,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-19_892_7502405409639232846/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-01_016_4383065946329210021/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -106,12 +106,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451339 + transient_lastDdlTime 1269538381 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -170,7 +170,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-19_892_7502405409639232846/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-01_016_4383065946329210021/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -181,12 +181,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451339 + transient_lastDdlTime 1269538381 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -234,7 +234,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-19_892_7502405409639232846/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-01_016_4383065946329210021/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -245,21 +245,21 @@ columns.types string:string:string 
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451339
+ transient_lastDdlTime 1269538381
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
MultiFileSpray: false
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src [null-subquery1:subq1-subquery1:x, null-subquery2:subq1-subquery2:x1]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src [null-subquery1:subq1-subquery1:x, null-subquery2:subq1-subquery2:x1]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
Partition
base file name: src
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -270,12 +270,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
name src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451338
+ transient_lastDdlTime 1269538380
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -286,12 +286,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
name src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451338
+ transient_lastDdlTime 1269538380
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: src
name: src
@@ -303,14 +303,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-19_892_7502405409639232846/10002
- destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-19_892_7502405409639232846/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-01_016_4383065946329210021/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-01_016_4383065946329210021/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-19_892_7502405409639232846/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-01_016_4383065946329210021/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -320,20 +320,20 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451339
+ transient_lastDdlTime 1269538381
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-19_892_7502405409639232846/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-01_016_4383065946329210021/10001
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-19_892_7502405409639232846/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-01_016_4383065946329210021/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -349,9 +349,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-19_892_7502405409639232846/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-19_892_7502405409639232846/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-01_016_4383065946329210021/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-01_016_4383065946329210021/10002]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-19_892_7502405409639232846/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-01_016_4383065946329210021/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -362,12 +362,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451339
+ transient_lastDdlTime 1269538381
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -378,12 +378,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451339
+ transient_lastDdlTime 1269538381
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
name: dest_j1
@@ -392,7 +392,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-19_892_7502405409639232846/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-01_016_4383065946329210021/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -403,12 +403,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451339
+ transient_lastDdlTime 1269538381
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
@@ -439,14 +439,20 @@
POSTHOOK: Input: default@src
POSTHOOK: Input: default@src1
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.val2 SET null[(src)x.FieldSchema(name:value, type:string, comment:default), (src)x1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: select * from dest_j1 x order by x.key
PREHOOK: type: QUERY
PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-26_098_2878417726599997408/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-05_314_2131063934070352019/10000
POSTHOOK: query: select * from dest_j1 x order by x.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-26_098_2878417726599997408/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-05_314_2131063934070352019/10000
+POSTHOOK: Lineage: dest_j1.val2 SET null[(src)x.FieldSchema(name:value, type:string, comment:default), (src)x1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
128 val_128
128 val_128
128 val_128
@@ -486,3 +492,6 @@
POSTHOOK: query: drop table dest_j1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.val2 SET null[(src)x.FieldSchema(name:value, type:string, comment:default), (src)x1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/input39.q.out
===================================================================
--- ql/src/test/results/clientpositive/input39.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/input39.q.out (working copy)
@@ -26,6 +26,8 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@t1@ds=1
+POSTHOOK: Lineage: t1 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: insert overwrite table t1 partition (ds='2')
select key, value from src
PREHOOK: type: QUERY
@@ -36,6 +38,10 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@t1@ds=2
+POSTHOOK: Lineage: t1 PARTITION(ds=2).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: insert overwrite table t2 partition (ds='1')
select key, value from src
PREHOOK: type: QUERY
@@ -46,12 +52,24 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@t2@ds=1
+POSTHOOK: Lineage: t2 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t2 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: explain
select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1'
PREHOOK: type: QUERY
POSTHOOK: query: explain
select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1'
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: t2 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t2 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF t1) (TOK_TABREF t2) (= (. (TOK_TABLE_OR_COL t1) key) (. (TOK_TABLE_OR_COL t2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL t1) ds) '1') (= (. (TOK_TABLE_OR_COL t2) ds) '1')))))
@@ -140,7 +158,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-21-55_234_8836849389177124348/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-41_795_1853597156454713366/10002
Reduce Output Operator
sort order:
tag: -1
@@ -175,20 +193,38 @@
PREHOOK: type: QUERY
PREHOOK: Input: default@t2@ds=1
PREHOOK: Input: default@t1@ds=1
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-21-55_400_7420820799771241077/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-42_323_2036435004449571494/10000
POSTHOOK: query: select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t2@ds=1
POSTHOOK: Input: default@t1@ds=1
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-21-55_400_7420820799771241077/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-42_323_2036435004449571494/10000
+POSTHOOK: Lineage: t2 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t2 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
18
PREHOOK: query: drop table t1
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table t1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@t1
+POSTHOOK: Lineage: t2 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t2 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: drop table t2
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table t2
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@t2
+POSTHOOK: Lineage: t2 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t2 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/bucketmapjoin1.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucketmapjoin1.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/bucketmapjoin1.q.out (working copy)
@@ -161,7 +161,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-24_320_7804903070266586536/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-23_376_1521271256398206472/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -172,12 +172,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348784
+ transient_lastDdlTime 1269536543
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -237,7 +237,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-24_320_7804903070266586536/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-23_376_1521271256398206472/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -248,12 +248,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348784
+ transient_lastDdlTime 1269536543
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -262,12 +262,12 @@
Alias Bucket Base File Name Mapping:
b {srcbucket20.txt=[srcbucket20.txt, srcbucket22.txt], srcbucket21.txt=[srcbucket21.txt, srcbucket23.txt]}
Alias Bucket File Name Mapping:
- b {file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]}
+ b {file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]}
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin [a]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin [a]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin
Partition
base file name: srcbucket_mapjoin
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -279,12 +279,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin
name srcbucket_mapjoin
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348782
+ transient_lastDdlTime 1269536541
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -296,12 +296,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin
name srcbucket_mapjoin
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348782
+ transient_lastDdlTime 1269536541
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket_mapjoin
name: srcbucket_mapjoin
@@ -313,14 +313,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-24_320_7804903070266586536/10002
- destination: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-24_320_7804903070266586536/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-23_376_1521271256398206472/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-23_376_1521271256398206472/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-24_320_7804903070266586536/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-23_376_1521271256398206472/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -330,20 +330,20 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348784
+ transient_lastDdlTime 1269536543
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
- tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-24_320_7804903070266586536/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-23_376_1521271256398206472/10001
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-24_320_7804903070266586536/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-23_376_1521271256398206472/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -359,9 +359,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-24_320_7804903070266586536/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-24_320_7804903070266586536/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-23_376_1521271256398206472/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-23_376_1521271256398206472/10002]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-24_320_7804903070266586536/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-23_376_1521271256398206472/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -372,12 +372,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348784
+ transient_lastDdlTime 1269536543
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -388,12 +388,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348784
+ transient_lastDdlTime 1269536543
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
name: bucketmapjoin_tmp_result
@@ -402,7 +402,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-24_320_7804903070266586536/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-23_376_1521271256398206472/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -413,12 +413,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348784
+ transient_lastDdlTime 1269536543
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -441,14 +441,20 @@
POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
POSTHOOK: Input: default@srcbucket_mapjoin
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-32_954_8069950489380829899/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-30_107_8887243205158072765/10000
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-32_954_8069950489380829899/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-30_107_8887243205158072765/10000
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
464
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_1
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
@@ -460,6 +466,12 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
POSTHOOK: Output: default@bucketmapjoin_hash_result_1
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result
select /*+mapjoin(b)*/ a.key, a.value, b.value
from srcbucket_mapjoin a join srcbucket_mapjoin_part b
@@ -476,14 +488,32 @@
POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
POSTHOOK: Input: default@srcbucket_mapjoin
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-49_054_4090200478765355576/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-42_498_8303657555798561849/10000
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-49_054_4090200478765355576/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-42_498_8303657555798561849/10000
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
464
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_2
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
@@ -495,20 +525,44 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
POSTHOOK: Output: default@bucketmapjoin_hash_result_2
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2
from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b
on a.key = b.key
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_hash_result_2
PREHOOK: Input: default@bucketmapjoin_hash_result_1
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-56_624_3600396860306016630/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-48_722_6274762129345072471/10000
POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2
from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b
on a.key = b.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_hash_result_2
POSTHOOK: Input: default@bucketmapjoin_hash_result_1
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-56_624_3600396860306016630/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-48_722_6274762129345072471/10000
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1,
type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] 0 0 0 PREHOOK: query: explain extended insert overwrite table bucketmapjoin_tmp_result @@ -522,6 +576,18 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF srcbucket_mapjoin a) (TOK_TABREF srcbucket_mapjoin_part b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB bucketmapjoin_tmp_result)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. 
(TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL b) ds) "2008-04-08")))) @@ -584,7 +650,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-01_012_3767509085170715505/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-52_083_5899010731773300358/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -595,12 +661,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348784 + transient_lastDdlTime 1269536543 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -655,7 +721,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-01_012_3767509085170715505/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-52_083_5899010731773300358/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -666,12 +732,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348784 + transient_lastDdlTime 1269536543 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -680,12 +746,12 @@ Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt], srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], 
file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Needs Tagging: false Path -> Alias: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] Path -> Partition: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -699,13 +765,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348782 + transient_lastDdlTime 1269536542 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -717,13 +783,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { 
i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348782 + transient_lastDdlTime 1269536542 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part name: srcbucket_mapjoin_part @@ -735,14 +801,14 @@ Move Operator files: hdfs directory: true - source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-01_012_3767509085170715505/10002 - destination: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-01_012_3767509085170715505/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-52_083_5899010731773300358/10002 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-52_083_5899010731773300358/10000 Stage: Stage-0 Move Operator tables: replace: true - source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-01_012_3767509085170715505/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-52_083_5899010731773300358/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -752,20 +818,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348784 + transient_lastDdlTime 1269536543 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-01_012_3767509085170715505/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-52_083_5899010731773300358/10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-01_012_3767509085170715505/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-52_083_5899010731773300358/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -781,9 +847,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-01_012_3767509085170715505/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-01_012_3767509085170715505/10002] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-52_083_5899010731773300358/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-52_083_5899010731773300358/10002] Path -> Partition: - 
file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-01_012_3767509085170715505/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-52_083_5899010731773300358/10002 Partition base file name: 10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -794,12 +860,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348784 + transient_lastDdlTime 1269536543 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -810,12 +876,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348784 + transient_lastDdlTime 1269536543 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -824,7 +890,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-01_012_3767509085170715505/10000 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-52_083_5899010731773300358/10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -835,12 +901,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348784 + transient_lastDdlTime 1269536543 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -863,14 +929,44 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE 
null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-10_899_5943241423234312650/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-59_762_3719157792964379971/10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-10_899_5943241423234312650/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-59_762_3719157792964379971/10000 +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF 
null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] 464 PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_1 select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result @@ -882,6 +978,24 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result POSTHOOK: Output: default@bucketmapjoin_hash_result_1 +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: 
bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b @@ -898,14 +1012,56 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF 
null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-28_066_1208658957261253807/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-13_148_2346782318774783818/10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-28_066_1208658957261253807/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-13_148_2346782318774783818/10000 +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF 
null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] 464 PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_2 select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result @@ -917,48 +1073,240 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result POSTHOOK: Output: default@bucketmapjoin_hash_result_2 +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, 
type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_hash_result_2 PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-36_714_9051036103640437662/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-19_292_5412815435989327698/10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_hash_result_2 POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-36_714_9051036103640437662/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-19_292_5412815435989327698/10000 +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: 
bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] 0 0 0 PREHOOK: query: drop table 
bucketmapjoin_hash_result_2 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table bucketmapjoin_hash_result_2 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@bucketmapjoin_hash_result_2 +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE 
null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: drop table bucketmapjoin_hash_result_1
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table bucketmapjoin_hash_result_1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucketmapjoin_hash_result_1
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: drop table bucketmapjoin_tmp_result
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table bucketmapjoin_tmp_result
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: drop table srcbucket_mapjoin
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table srcbucket_mapjoin
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@srcbucket_mapjoin
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: drop table srcbucket_mapjoin_part
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table srcbucket_mapjoin_part
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: drop table srcbucket_mapjoin_part_2
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table srcbucket_mapjoin_part_2
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
Index: ql/src/test/results/clientpositive/input_columnarserde.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_columnarserde.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/input_columnarserde.q.out (working copy)
@@ -101,14 +101,24 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_thrift
POSTHOOK: Output: default@input_columnarserde
+POSTHOOK: Lineage: input_columnarserde.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
PREHOOK: query: SELECT input_columnarserde.* FROM input_columnarserde DISTRIBUTE BY 1
PREHOOK: type: QUERY
PREHOOK: Input: default@input_columnarserde
-PREHOOK: Output: file:/data/users/heyongqiang/trunk/VENDOR.hive/trunk/.ptest_0/build/ql/tmp/1940999347/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-34_326_3205210422383726345/10000
POSTHOOK: query: SELECT input_columnarserde.* FROM input_columnarserde DISTRIBUTE BY 1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@input_columnarserde
-POSTHOOK: Output: file:/data/users/heyongqiang/trunk/VENDOR.hive/trunk/.ptest_0/build/ql/tmp/1940999347/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-34_326_3205210422383726345/10000
+POSTHOOK: Lineage: input_columnarserde.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
[0,0,0] ["0","0","0"] {"key_0":"value_0"} 1712634731 record_0
[1,2,3] ["10","100","1000"] {"key_1":"value_1"} 465985200 record_1
[2,4,6] ["20","200","2000"] {"key_2":"value_2"} -751827638 record_2
@@ -123,11 +133,16 @@
PREHOOK: query: SELECT input_columnarserde.a[0], input_columnarserde.b[0], input_columnarserde.c['key2'], input_columnarserde.d, input_columnarserde.e FROM input_columnarserde DISTRIBUTE BY 1
PREHOOK: type: QUERY
PREHOOK: Input: default@input_columnarserde
-PREHOOK: Output: file:/data/users/heyongqiang/trunk/VENDOR.hive/trunk/.ptest_0/build/ql/tmp/534165813/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-37_424_6016857450369816629/10000
POSTHOOK: query: SELECT input_columnarserde.a[0], input_columnarserde.b[0], input_columnarserde.c['key2'], input_columnarserde.d, input_columnarserde.e FROM input_columnarserde DISTRIBUTE BY 1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@input_columnarserde
-POSTHOOK: Output: file:/data/users/heyongqiang/trunk/VENDOR.hive/trunk/.ptest_0/build/ql/tmp/534165813/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-37_424_6016857450369816629/10000
+POSTHOOK: Lineage: input_columnarserde.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
0 0 NULL 1712634731 record_0
1 10 NULL 465985200 record_1
2 20 NULL -751827638 record_2
@@ -144,3 +159,8 @@
POSTHOOK: query: drop table input_columnarserde
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@input_columnarserde
+POSTHOOK: Lineage: input_columnarserde.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: input_columnarserde.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
Index: ql/src/test/results/clientpositive/groupby10.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby10.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby10.q.out (working copy)
@@ -107,7 +107,7 @@
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1034721639/10004
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-39_472_3027396559259212371/10004
Reduce Output Operator
key expressions:
expr: _col0
@@ -173,7 +173,7 @@
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1034721639/10005
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-39_472_3027396559259212371/10005
Reduce Output Operator
key expressions:
expr: _col0
@@ -251,14 +251,26 @@
POSTHOOK: Input: default@input
POSTHOOK: Output: default@dest1
POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest2.val2 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.val1 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(input)input.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: dest1.val2 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.val1 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(input)input.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: SELECT * from dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1454167877/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-49_175_7329092573869531170/10000
POSTHOOK: query: SELECT * from dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1454167877/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-49_175_7329092573869531170/10000
+POSTHOOK: Lineage: dest2.val2 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.val1 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(input)input.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: dest1.val2 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.val1 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(input)input.FieldSchema(name:key, type:int, comment:null), ]
27 1 1
66 1 1
86 1 1
@@ -282,11 +294,17 @@
PREHOOK: query: SELECT * from dest2
PREHOOK: type: QUERY
PREHOOK: Input: default@dest2
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/750550239/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-49_223_7129826458386427493/10000
POSTHOOK: query: SELECT * from dest2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest2
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/750550239/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-49_223_7129826458386427493/10000
+POSTHOOK: Lineage: dest2.val2 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.val1 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(input)input.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: dest1.val2 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.val1 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(input)input.FieldSchema(name:key, type:int, comment:null), ]
27 27 27
66 66 66
86 86 86
@@ -312,13 +330,31 @@
POSTHOOK: query: drop table INPUT
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@input
+POSTHOOK: Lineage: dest2.val2 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.val1 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(input)input.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: dest1.val2 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.val1 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(input)input.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: drop table dest1
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table dest1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest2.val2 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.val1 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(input)input.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: dest1.val2 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.val1 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(input)input.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: drop table dest2
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table dest2
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest2.val2 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.val1 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(input)input.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: dest1.val2 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.val1 UDAF null[(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(input)input.FieldSchema(name:key, type:int, comment:null), ]
Index: ql/src/test/results/clientpositive/mapreduce6.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapreduce6.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/mapreduce6.q.out (working copy)
@@ -113,14 +113,22 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.one SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.ten SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/492905310/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-26_826_6906319313116786149/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/492905310/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-26_826_6906319313116786149/10000
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.one SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.ten SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
490 49 0 val_490
491 49 1 val_491
492 49 2 val_492
Index: ql/src/test/results/clientpositive/groupby1_noskew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby1_noskew.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby1_noskew.q.out (working copy)
@@ -94,14 +94,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest_g1
+POSTHOOK: Lineage: dest_g1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_g1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest_g1.* FROM dest_g1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest_g1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1084528559/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-36_858_2450586086757651935/10000
POSTHOOK: query: SELECT dest_g1.* FROM dest_g1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest_g1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1084528559/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-36_858_2450586086757651935/10000
+POSTHOOK: Lineage: dest_g1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_g1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/join_rc.q.out
===================================================================
--- ql/src/test/results/clientpositive/join_rc.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/join_rc.q.out (working copy)
@@ -24,6 +24,8 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@join_rc1
+POSTHOOK: Lineage: join_rc1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: insert overwrite table join_rc2 select * from src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
@@ -32,6 +34,10 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@join_rc2
+POSTHOOK: Lineage: join_rc1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: explain
select join_rc1.key, join_rc2.value FROM join_rc1 JOIN join_rc2 ON join_rc1.key = join_rc2.key
@@ -40,6 +46,10 @@
select join_rc1.key, join_rc2.value FROM join_rc1 JOIN join_rc2 ON join_rc1.key = join_rc2.key
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: join_rc1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF join_rc1) (TOK_TABREF join_rc2) (= (. (TOK_TABLE_OR_COL join_rc1) key) (. (TOK_TABLE_OR_COL join_rc2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL join_rc1) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL join_rc2) value)))))
@@ -114,13 +124,17 @@
PREHOOK: type: QUERY
PREHOOK: Input: default@join_rc2
PREHOOK: Input: default@join_rc1
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-27-47_640_1441550174396831657/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-21_894_4594765538185604807/10000
POSTHOOK: query: select join_rc1.key, join_rc2.value FROM join_rc1 JOIN join_rc2 ON join_rc1.key = join_rc2.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@join_rc2
POSTHOOK: Input: default@join_rc1
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-27-47_640_1441550174396831657/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-21_894_4594765538185604807/10000
+POSTHOOK: Lineage: join_rc1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 val_0
0 val_0
0 val_0
@@ -1154,8 +1168,16 @@
POSTHOOK: query: drop table join_rc1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@join_rc1
+POSTHOOK: Lineage: join_rc1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: drop table join_rc2
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table join_rc2
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@join_rc2
+POSTHOOK: Lineage: join_rc1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: join_rc2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/join8.q.out
===================================================================
--- ql/src/test/results/clientpositive/join8.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/join8.q.out (working copy)
@@ -206,14 +206,22 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c4 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1413469153/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-33_983_7609828628349689800/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1413469153/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-33_983_7609828628349689800/10000
+POSTHOOK: Lineage: dest1.c4 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ]
11 val_11 NULL NULL
12 val_12 NULL NULL
12 val_12 NULL NULL
Index: ql/src/test/results/clientpositive/groupby1_map_skew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby1_map_skew.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby1_map_skew.q.out (working copy)
@@ -72,7 +72,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/858199914/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-24_763_2060962697580403789/10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -137,14 +137,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1273565267/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-31_168_4914051884317268922/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1273565267/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-31_168_4914051884317268922/10000
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/input11.q.out
===================================================================
--- ql/src/test/results/clientpositive/input11.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/input11.q.out (working copy)
@@ -66,7 +66,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1086060660/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-03_641_3453032239409756404/10000
Stage: Stage-0
Move Operator
@@ -81,7 +81,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/154122441/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-03_641_3453032239409756404/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -115,14 +115,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/722123875/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-07_198_7354559796699988048/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/722123875/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-07_198_7354559796699988048/10000
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
86 val_86
27 val_27
98 val_98
Index: ql/src/test/results/clientpositive/groupby6_noskew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby6_noskew.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby6_noskew.q.out (working copy)
@@ -82,14 +82,16 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1760436384/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-03_818_7755412084091059238/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1760436384/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-03_818_7755412084091059238/10000
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
0
1
2
Index: ql/src/test/results/clientpositive/input34.q.out
===================================================================
--- ql/src/test/results/clientpositive/input34.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/input34.q.out (working copy)
@@ -80,7 +80,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/575091180/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-02_938_1525129788396511595/10000
Stage: Stage-0
Move Operator
@@ -95,7 +95,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1105003109/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-02_938_1525129788396511595/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -139,14 +139,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/532945765/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-06_426_5364890707830156291/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/532945765/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-06_426_5364890707830156291/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
238 val_238
86 val_86
311 val_311
Index: ql/src/test/results/clientpositive/union6.q.out
===================================================================
--- ql/src/test/results/clientpositive/union6.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/union6.q.out (working copy)
@@ -79,7 +79,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1465614368/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-31-59_198_8387282828580994722/10002
Union
Select Operator
expressions:
@@ -96,7 +96,7 @@
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: tmptable
- file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1465614368/10004
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-31-59_198_8387282828580994722/10004
Union
Select Operator
expressions:
@@ -121,7 +121,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1081681753/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-31-59_198_8387282828580994722/10000
Stage: Stage-0
Move Operator
@@ -136,7 +136,7 @@
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1465614368/10003
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-31-59_198_8387282828580994722/10003
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -196,14 +196,18 @@
POSTHOOK: Input: default@src
POSTHOOK: Input: default@src1
POSTHOOK: Output: default@tmptable
+POSTHOOK: Lineage: tmptable.value SET null[(src)s1.null, (src1)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tmptable.key SET null[(src1)s2.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: select * from tmptable x sort by x.key, x.value
PREHOOK: type: QUERY
PREHOOK: Input: default@tmptable
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1370886727/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-32-11_633_7062410376836151710/10000
POSTHOOK: query: select * from tmptable x sort by x.key, x.value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@tmptable
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1370886727/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-32-11_633_7062410376836151710/10000
+POSTHOOK: Lineage: tmptable.value SET null[(src)s1.null, (src1)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tmptable.key SET null[(src1)s2.FieldSchema(name:key, type:string, comment:default), ]
@@ -235,3 +239,5 @@
POSTHOOK: query: drop table tmptable
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@tmptable
+POSTHOOK: Lineage: tmptable.value SET null[(src)s1.null, (src1)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: tmptable.key SET null[(src1)s2.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/mapreduce1.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapreduce1.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/mapreduce1.q.out (working copy)
@@ -122,14 +122,22 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.one SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.ten SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/337545490/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-38-54_549_8597132926171971648/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/337545490/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-38-54_549_8597132926171971648/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.one SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.ten SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
0 0 0 val_0
0 0 0 val_0
0 0 0 val_0
Index: ql/src/test/results/clientpositive/bucket2.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucket2.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/bucket2.q.out (working copy)
@@ -49,9 +49,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src [src]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src [src]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
Partition
base file name: src
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -62,12 +62,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
name src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1267133685
+ transient_lastDdlTime 1269536305
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -78,12 +78,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
name src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1267133685
+ transient_lastDdlTime 1269536305
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: src
name: src
@@ -99,7 +99,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-34-46_333_5166554257056855352/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-26_574_4817555739924274249/10000
NumFilesPerFileSink: 2
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -111,12 +111,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucket2_1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucket2_1
name bucket2_1
serialization.ddl struct bucket2_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1267133686
+ transient_lastDdlTime 1269536306
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucket2_1
TotalFiles: 2
@@ -126,7 +126,7 @@
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-34-46_333_5166554257056855352/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-26_574_4817555739924274249/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -137,15 +137,15 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucket2_1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucket2_1
name bucket2_1
serialization.ddl struct bucket2_1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1267133686
+ transient_lastDdlTime 1269536306
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucket2_1
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-34-46_333_5166554257056855352/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-26_574_4817555739924274249/10001
PREHOOK: query: insert overwrite table bucket2_1
@@ -158,12 +158,16 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@bucket2_1
+POSTHOOK: Lineage: bucket2_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket2_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: explain
select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
PREHOOK: type: QUERY
POSTHOOK: query: explain
select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: bucket2_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket2_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF bucket2_1 (TOK_TABLESAMPLE 1 2) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))))
@@ -221,11 +225,13 @@
PREHOOK: query: select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
PREHOOK: type: QUERY
PREHOOK: Input: default@bucket2_1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-34-51_044_2605954984360069823/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-30_184_3211075560530376798/10000
POSTHOOK: query: select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucket2_1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-34-51_044_2605954984360069823/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-30_184_3211075560530376798/10000
+POSTHOOK: Lineage: bucket2_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket2_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 val_0
0 val_0
0 val_0
@@ -478,3 +484,5 @@
POSTHOOK: query: drop table bucket2_1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucket2_1
+POSTHOOK: Lineage: bucket2_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket2_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/input20.q.out
===================================================================
--- ql/src/test/results/clientpositive/input20.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/input20.q.out (working copy)
@@ -140,14 +140,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT * FROM dest1 SORT BY key, value
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/1591758903/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-21-28_002_817926079458334515/10000
POSTHOOK: query: SELECT * FROM dest1 SORT BY key, value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/1591758903/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-21-28_002_817926079458334515/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
1 105_105
1 10_10
1 111_111
Index: ql/src/test/results/clientpositive/input14_limit.q.out
===================================================================
--- ql/src/test/results/clientpositive/input14_limit.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/input14_limit.q.out (working copy)
@@ -75,7 +75,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/275552824/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-36_089_972205030957163858/10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -151,14 +151,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/679446797/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-42_607_3925142902973061623/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/679446797/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-42_607_3925142902973061623/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/partition_wise_fileformat.q.out
===================================================================
--- ql/src/test/results/clientpositive/partition_wise_fileformat.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/partition_wise_fileformat.q.out (working copy)
@@ -11,13 +11,17 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src1
POSTHOOK: Output: default@partition_test_partitioned@dt=100
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: show table extended like partition_test_partitioned
PREHOOK: type: SHOW_TABLESTATUS
POSTHOOK: query: show table extended like partition_test_partitioned
POSTHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
tableName:partition_test_partitioned
-owner:zshao
-location:file:/data/users/zshao/hadoop_hive_trunk/build/ql/test/data/warehouse/partition_test_partitioned
+owner:athusoo
+location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/partition_test_partitioned
inputformat:org.apache.hadoop.mapred.TextInputFormat
outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
columns:struct columns { string key, string value}
@@ -27,16 +31,18 @@
totalFileSize:216
maxFileSize:216
minFileSize:216
-lastAccessTime:unknown
-lastUpdateTime:1265695450000
+lastAccessTime:0
+lastUpdateTime:1269539142000
PREHOOK: query: show table extended like partition_test_partitioned partition(dt=100)
PREHOOK: type: SHOW_TABLESTATUS
POSTHOOK: query: show table extended like partition_test_partitioned partition(dt=100)
POSTHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
tableName:partition_test_partitioned
-owner:zshao
-location:file:/data/users/zshao/hadoop_hive_trunk/build/ql/test/data/warehouse/partition_test_partitioned/dt=100
+owner:athusoo
+location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/partition_test_partitioned/dt=100
inputformat:org.apache.hadoop.mapred.TextInputFormat
outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
columns:struct columns { string key, string value}
@@ -46,17 +52,19 @@
totalFileSize:216
maxFileSize:216
minFileSize:216
-lastAccessTime:unknown
-lastUpdateTime:1265695450000
+lastAccessTime:0
+lastUpdateTime:1269539142000
PREHOOK: query: select key from partition_test_partitioned where dt=100
PREHOOK: type: QUERY
PREHOOK: Input: default@partition_test_partitioned@dt=100
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-11_108_1574228616810949519/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-45-43_204_6205360003133304742/10000
POSTHOOK: query: select key from partition_test_partitioned where dt=100
POSTHOOK: type: QUERY
POSTHOOK: Input: default@partition_test_partitioned@dt=100
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-11_108_1574228616810949519/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-45-43_204_6205360003133304742/10000
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
238
311
@@ -85,11 +93,13 @@
PREHOOK: query: select key from partition_test_partitioned
PREHOOK: type: QUERY
PREHOOK: Input: default@partition_test_partitioned@dt=100
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-14_894_1993466480222470885/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-45-46_125_6433538793864342214/10000
POSTHOOK: query: select key from partition_test_partitioned
POSTHOOK: type: QUERY
POSTHOOK: Input: default@partition_test_partitioned@dt=100
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-14_894_1993466480222470885/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-45-46_125_6433538793864342214/10000
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
238
311
@@ -121,6 +131,8 @@
POSTHOOK: type: null
POSTHOOK: Input: default@partition_test_partitioned
POSTHOOK: Output: default@partition_test_partitioned
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: insert overwrite table partition_test_partitioned partition(dt=101) select * from src1
PREHOOK: type: QUERY
PREHOOK: Input: default@src1
@@ -129,13 +141,21 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src1
POSTHOOK: Output: 
default@partition_test_partitioned@dt=101 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: show table extended like partition_test_partitioned PREHOOK: type: SHOW_TABLESTATUS POSTHOOK: query: show table extended like partition_test_partitioned POSTHOOK: type: SHOW_TABLESTATUS +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] tableName:partition_test_partitioned -owner:zshao -location:file:/data/users/zshao/hadoop_hive_trunk/build/ql/test/data/warehouse/partition_test_partitioned +owner:athusoo +location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/partition_test_partitioned inputformat:org.apache.hadoop.hive.ql.io.RCFileInputFormat outputformat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat columns:struct columns { string key, string value} @@ -145,16 +165,20 @@ totalFileSize:586 maxFileSize:370 minFileSize:216 -lastAccessTime:unknown -lastUpdateTime:1265695461000 +lastAccessTime:0 +lastUpdateTime:1269539151000 PREHOOK: query: show table extended like partition_test_partitioned partition(dt=100) PREHOOK: type: SHOW_TABLESTATUS POSTHOOK: query: show table extended like partition_test_partitioned partition(dt=100) POSTHOOK: type: SHOW_TABLESTATUS +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] tableName:partition_test_partitioned -owner:zshao -location:file:/data/users/zshao/hadoop_hive_trunk/build/ql/test/data/warehouse/partition_test_partitioned/dt=100 +owner:athusoo +location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/partition_test_partitioned/dt=100 inputformat:org.apache.hadoop.mapred.TextInputFormat outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat columns:struct columns { string key, string value} @@ -164,16 +188,20 @@ totalFileSize:216 maxFileSize:216 minFileSize:216 -lastAccessTime:unknown -lastUpdateTime:1265695461000 +lastAccessTime:0 
+lastUpdateTime:1269539151000 PREHOOK: query: show table extended like partition_test_partitioned partition(dt=101) PREHOOK: type: SHOW_TABLESTATUS POSTHOOK: query: show table extended like partition_test_partitioned partition(dt=101) POSTHOOK: type: SHOW_TABLESTATUS +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] tableName:partition_test_partitioned -owner:zshao -location:file:/data/users/zshao/hadoop_hive_trunk/build/ql/test/data/warehouse/partition_test_partitioned/dt=101 +owner:athusoo +location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/partition_test_partitioned/dt=101 inputformat:org.apache.hadoop.hive.ql.io.RCFileInputFormat outputformat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat columns:struct columns { string key, string value} @@ -183,17 +211,21 @@ totalFileSize:370 maxFileSize:370 minFileSize:370 -lastAccessTime:unknown -lastUpdateTime:1265695461000 +lastAccessTime:0 +lastUpdateTime:1269539151000 PREHOOK: query: select key from partition_test_partitioned where dt=100 PREHOOK: type: QUERY PREHOOK: Input: default@partition_test_partitioned@dt=100 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-21_275_2001928558911118972/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-45-52_233_9035140244513444398/10000 POSTHOOK: query: select key from partition_test_partitioned where dt=100 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_test_partitioned@dt=100 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-21_275_2001928558911118972/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-45-52_233_9035140244513444398/10000 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] 238 311 @@ -222,11 +254,15 @@ PREHOOK: query: select key from partition_test_partitioned where dt=101 PREHOOK: type: QUERY PREHOOK: Input: default@partition_test_partitioned@dt=101 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-24_508_5231406110420378004/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-45-55_206_4938853017414127946/10000 POSTHOOK: query: select key from partition_test_partitioned where dt=101 POSTHOOK: type: QUERY 
POSTHOOK: Input: default@partition_test_partitioned@dt=101 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-24_508_5231406110420378004/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-45-55_206_4938853017414127946/10000 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] 238 311 @@ -256,12 +292,16 @@ PREHOOK: type: QUERY PREHOOK: Input: default@partition_test_partitioned@dt=100 PREHOOK: Input: default@partition_test_partitioned@dt=101 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-27_864_595783441105808452/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-45-58_128_5025234910075665018/10000 POSTHOOK: query: select key from partition_test_partitioned POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_test_partitioned@dt=100 POSTHOOK: Input: default@partition_test_partitioned@dt=101 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-27_864_595783441105808452/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-45-58_128_5025234910075665018/10000 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] 238 311 @@ -318,6 +358,10 @@ POSTHOOK: type: null POSTHOOK: Input: default@partition_test_partitioned POSTHOOK: Output: default@partition_test_partitioned +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: insert overwrite table partition_test_partitioned partition(dt=102) select * from src1 PREHOOK: type: QUERY PREHOOK: Input: default@src1 @@ -326,13 +370,25 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 POSTHOOK: Output: default@partition_test_partitioned@dt=102 +POSTHOOK: Lineage: 
partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: show table extended like partition_test_partitioned PREHOOK: type: SHOW_TABLESTATUS POSTHOOK: query: show table extended like partition_test_partitioned POSTHOOK: type: SHOW_TABLESTATUS +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] tableName:partition_test_partitioned -owner:zshao -location:file:/data/users/zshao/hadoop_hive_trunk/build/ql/test/data/warehouse/partition_test_partitioned +owner:athusoo +location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/partition_test_partitioned inputformat:org.apache.hadoop.mapred.SequenceFileInputFormat outputformat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat columns:struct columns { string key, string value} @@ -342,16 +398,22 @@ totalFileSize:1474 maxFileSize:888 minFileSize:216 -lastAccessTime:unknown -lastUpdateTime:1265695474000 +lastAccessTime:0 +lastUpdateTime:1269539164000 PREHOOK: query: show table extended like partition_test_partitioned partition(dt=100) PREHOOK: type: SHOW_TABLESTATUS POSTHOOK: query: show table extended like partition_test_partitioned partition(dt=100) POSTHOOK: type: SHOW_TABLESTATUS +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE 
null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] tableName:partition_test_partitioned -owner:zshao -location:file:/data/users/zshao/hadoop_hive_trunk/build/ql/test/data/warehouse/partition_test_partitioned/dt=100 +owner:athusoo +location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/partition_test_partitioned/dt=100 inputformat:org.apache.hadoop.mapred.TextInputFormat outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat columns:struct columns { string key, string value} @@ -361,16 +423,22 @@ totalFileSize:216 maxFileSize:216 minFileSize:216 -lastAccessTime:unknown -lastUpdateTime:1265695474000 +lastAccessTime:0 +lastUpdateTime:1269539164000 PREHOOK: query: show table extended like partition_test_partitioned partition(dt=101) PREHOOK: type: SHOW_TABLESTATUS POSTHOOK: query: show table extended like partition_test_partitioned partition(dt=101) POSTHOOK: type: SHOW_TABLESTATUS +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] tableName:partition_test_partitioned -owner:zshao -location:file:/data/users/zshao/hadoop_hive_trunk/build/ql/test/data/warehouse/partition_test_partitioned/dt=101 +owner:athusoo +location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/partition_test_partitioned/dt=101 inputformat:org.apache.hadoop.hive.ql.io.RCFileInputFormat outputformat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat columns:struct columns { string key, string value} @@ -380,16 +448,22 @@ totalFileSize:370 maxFileSize:370 minFileSize:370 -lastAccessTime:unknown -lastUpdateTime:1265695474000 +lastAccessTime:0 +lastUpdateTime:1269539164000 PREHOOK: query: show table extended like partition_test_partitioned partition(dt=102) PREHOOK: type: SHOW_TABLESTATUS POSTHOOK: query: show table extended like partition_test_partitioned partition(dt=102) POSTHOOK: type: SHOW_TABLESTATUS +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned 
PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] tableName:partition_test_partitioned -owner:zshao -location:file:/data/users/zshao/hadoop_hive_trunk/build/ql/test/data/warehouse/partition_test_partitioned/dt=102 +owner:athusoo +location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/partition_test_partitioned/dt=102 inputformat:org.apache.hadoop.mapred.SequenceFileInputFormat outputformat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat columns:struct columns { string key, string value} @@ -399,17 +473,23 @@ totalFileSize:888 maxFileSize:888 minFileSize:888 -lastAccessTime:unknown -lastUpdateTime:1265695474000 +lastAccessTime:0 +lastUpdateTime:1269539164000 PREHOOK: query: select key from partition_test_partitioned where dt=100 PREHOOK: type: QUERY PREHOOK: Input: default@partition_test_partitioned@dt=100 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-34_761_6787250997648210654/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-46-04_452_4518526475126620034/10000 POSTHOOK: query: select key from partition_test_partitioned where dt=100 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_test_partitioned@dt=100 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-34_761_6787250997648210654/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-46-04_452_4518526475126620034/10000 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] 238 311 @@ -438,11 +518,17 @@ PREHOOK: query: select key from partition_test_partitioned where dt=101 PREHOOK: type: QUERY PREHOOK: Input: default@partition_test_partitioned@dt=101 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-37_955_7303542209833538415/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-46-07_405_520778066278064762/10000 POSTHOOK: query: select key from partition_test_partitioned where dt=101 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_test_partitioned@dt=101 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-37_955_7303542209833538415/10000 +POSTHOOK: Output: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-46-07_405_520778066278064762/10000 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] 238 311 @@ -471,11 +557,17 @@ PREHOOK: query: select key from partition_test_partitioned where dt=102 PREHOOK: type: QUERY PREHOOK: Input: default@partition_test_partitioned@dt=102 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-41_137_8811852470249659413/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-46-10_336_8124420315869003240/10000 POSTHOOK: query: select key from partition_test_partitioned where dt=102 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_test_partitioned@dt=102 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-41_137_8811852470249659413/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-46-10_336_8124420315869003240/10000 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] 238 311 @@ -506,13 +598,19 @@ PREHOOK: Input: default@partition_test_partitioned@dt=100 PREHOOK: Input: default@partition_test_partitioned@dt=101 PREHOOK: Input: default@partition_test_partitioned@dt=102 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-44_320_1009489530520037925/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-46-13_269_2245223643396874312/10000 POSTHOOK: query: select key from partition_test_partitioned POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_test_partitioned@dt=100 POSTHOOK: Input: default@partition_test_partitioned@dt=101 POSTHOOK: Input: 
default@partition_test_partitioned@dt=102 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-44_320_1009489530520037925/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-46-13_269_2245223643396874312/10000 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] 238 311 @@ -593,13 +691,19 @@ PREHOOK: Input: default@partition_test_partitioned@dt=100 PREHOOK: Input: default@partition_test_partitioned@dt=101 PREHOOK: Input: default@partition_test_partitioned@dt=102 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-48_037_6984143836467375088/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-46-16_206_1257788709795864504/10000 POSTHOOK: query: select key from partition_test_partitioned where dt >=100 and dt <= 102 POSTHOOK: type: QUERY POSTHOOK: Input: default@partition_test_partitioned@dt=100 POSTHOOK: Input: default@partition_test_partitioned@dt=101 POSTHOOK: Input: default@partition_test_partitioned@dt=102 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk/build/ql/scratchdir/hive_2010-02-08_22-04-48_037_6984143836467375088/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-46-16_206_1257788709795864504/10000 +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] 238 311 Index: ql/src/test/results/clientpositive/groupby8_map.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby8_map.q.out (revision 927279) +++ ql/src/test/results/clientpositive/groupby8_map.q.out (working copy) @@ -83,7 +83,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - 
file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1432390249/10004 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-05_610_7511458838836897996/10004 Reduce Output Operator key expressions: expr: _col0 @@ -142,7 +142,7 @@ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1432390249/10005 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-05_610_7511458838836897996/10005 Reduce Output Operator key expressions: expr: _col0 @@ -213,14 +213,22 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 POSTHOOK: Output: default@dest2 +POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT DEST1.* FROM DEST1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1735026003/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-15_032_5998834362499531821/10000 POSTHOOK: query: SELECT DEST1.* FROM DEST1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1735026003/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-15_032_5998834362499531821/10000 +POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 1 10 1 100 1 @@ -533,11 +541,15 @@ PREHOOK: query: SELECT DEST2.* FROM DEST2 PREHOOK: type: QUERY PREHOOK: Input: default@dest2 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1229550677/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-15_085_1103872958274070/10000 POSTHOOK: query: SELECT DEST2.* FROM DEST2 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest2 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1229550677/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-15_085_1103872958274070/10000 +POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 1 10 1 100 1 Index: ql/src/test/results/clientpositive/merge1.q.out =================================================================== --- 
ql/src/test/results/clientpositive/merge1.q.out (revision 927279) +++ ql/src/test/results/clientpositive/merge1.q.out (working copy) @@ -98,7 +98,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/460591515/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-41_358_2255669243625745096/10000 Stage: Stage-0 Move Operator @@ -113,7 +113,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/374923564/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-41_358_2255669243625745096/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -147,14 +147,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.val UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: select * from dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1753710584/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-44_931_2219800410392100065/10000 POSTHOOK: query: select * from dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1753710584/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-44_931_2219800410392100065/10000 +POSTHOOK: Lineage: dest1.val UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 3 10 1 100 2 @@ -469,16 +473,22 @@ POSTHOOK: query: drop table dest1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.val UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: create table test_src(key string, value string) partitioned by (ds string) PREHOOK: type: CREATETABLE POSTHOOK: query: create table test_src(key string, value string) partitioned by (ds string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@test_src +POSTHOOK: Lineage: dest1.val UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: create table dest1(key string) PREHOOK: type: CREATETABLE POSTHOOK: query: create table dest1(key string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.val UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: insert overwrite table test_src partition(ds='101') select * from src PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -487,6 +497,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test_src@ds=101 +POSTHOOK: Lineage: test_src PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.val UDAF 
null[(src)src.null, ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: insert overwrite table test_src partition(ds='102') select * from src PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -495,12 +509,24 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test_src@ds=102 +POSTHOOK: Lineage: test_src PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.val UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: explain insert overwrite table dest1 select key from test_src PREHOOK: type: QUERY POSTHOOK: query: explain insert overwrite table dest1 select key from test_src POSTHOOK: type: QUERY +POSTHOOK: Lineage: test_src PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.val UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF test_src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key))))) @@ -539,7 +565,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/197096399/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-51_120_8250065111432245704/10000 Stage: Stage-0 Move Operator @@ -554,7 +580,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/357863135/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-51_120_8250065111432245704/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -586,12 +612,26 @@ POSTHOOK: Input: default@test_src@ds=101 POSTHOOK: Input: default@test_src@ds=102 POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: test_src PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.val UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=102).key SIMPLE 
null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(test_src)test_src.FieldSchema(name:ds, type:string, comment:null), ] PREHOOK: query: explain insert overwrite table dest1 select key from test_src PREHOOK: type: QUERY POSTHOOK: query: explain insert overwrite table dest1 select key from test_src POSTHOOK: type: QUERY +POSTHOOK: Lineage: test_src PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.val UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(test_src)test_src.FieldSchema(name:ds, type:string, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF test_src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key))))) @@ -630,7 +670,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1659642363/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-57_101_824273445207701751/10000 Stage: Stage-0 Move Operator @@ -645,7 +685,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/502887163/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-57_101_824273445207701751/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -677,13 +717,37 @@ POSTHOOK: Input: default@test_src@ds=101 POSTHOOK: Input: default@test_src@ds=102 POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: test_src PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.val UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(test_src)test_src.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(test_src)test_src.FieldSchema(name:ds, type:string, comment:null), ] PREHOOK: query: drop table test_src PREHOOK: type: DROPTABLE POSTHOOK: query: drop table test_src POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@test_src +POSTHOOK: Lineage: test_src PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.val UDAF null[(src)src.null, ] +POSTHOOK: Lineage: 
dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(test_src)test_src.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(test_src)test_src.FieldSchema(name:ds, type:string, comment:null), ] PREHOOK: query: drop table dest1 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table dest1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: test_src PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.val UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_src PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(test_src)test_src.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(test_src)test_src.FieldSchema(name:ds, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/join3.q.out =================================================================== --- ql/src/test/results/clientpositive/join3.q.out (revision 927279) +++ ql/src/test/results/clientpositive/join3.q.out (working copy) @@ -119,14 +119,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src3.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_3/build/ql/scratchdir/hive_2010-02-12_22-18-11_028_5305709121053771635/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-04_283_1272118300896426804/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_3/build/ql/scratchdir/hive_2010-02-12_22-18-11_028_5305709121053771635/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-04_283_1272118300896426804/10000 +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src3.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/input8.q.out =================================================================== --- ql/src/test/results/clientpositive/input8.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input8.q.out (working copy) @@ -62,7 +62,7 @@ Move Operator files: hdfs directory: true - 
destination: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/2001952269/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-19_518_6103693853958568155/10000
Stage: Stage-0
Move Operator
@@ -77,7 +77,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/2062282429/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-19_518_6103693853958568155/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -113,14 +113,20 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src1
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c3 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c2 SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/770495742/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-22_893_5601855571888188497/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/770495742/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-22_893_5601855571888188497/10000
+POSTHOOK: Lineage: dest1.c3 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c2 SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
NULL NULL NULL
NULL NULL NULL
NULL NULL NULL
Index: ql/src/test/results/clientpositive/filter_join_breaktask.q.out
===================================================================
--- ql/src/test/results/clientpositive/filter_join_breaktask.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/filter_join_breaktask.q.out (working copy)
@@ -17,6 +17,8 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src1
POSTHOOK: Output: default@filter_join_breaktask@ds=2008-04-08
+POSTHOOK: Lineage: filter_join_breaktask PARTITION(ds=2008-04-08).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: filter_join_breaktask PARTITION(ds=2008-04-08).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: EXPLAIN EXTENDED
SELECT f.key, g.value
FROM filter_join_breaktask f JOIN filter_join_breaktask m ON( f.key = m.key AND f.ds='2008-04-08' AND m.ds='2008-04-08' AND f.key is not null)
@@ -27,6 +29,8 @@
FROM filter_join_breaktask f JOIN filter_join_breaktask m ON( f.key = m.key AND f.ds='2008-04-08' AND m.ds='2008-04-08' AND f.key is not null)
JOIN filter_join_breaktask g ON(g.value = m.value AND g.ds='2008-04-08' AND m.ds='2008-04-08' AND m.value is not null AND m.value !='')
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: filter_join_breaktask PARTITION(ds=2008-04-08).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: filter_join_breaktask PARTITION(ds=2008-04-08).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_TABREF filter_join_breaktask f) (TOK_TABREF filter_join_breaktask m) (AND (AND (AND (= (. (TOK_TABLE_OR_COL f) key) (. (TOK_TABLE_OR_COL m) key)) (= (. (TOK_TABLE_OR_COL f) ds) '2008-04-08')) (= (. (TOK_TABLE_OR_COL m) ds) '2008-04-08')) (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL f) key)))) (TOK_TABREF filter_join_breaktask g) (AND (AND (AND (AND (= (. (TOK_TABLE_OR_COL g) value) (. (TOK_TABLE_OR_COL m) value)) (= (. (TOK_TABLE_OR_COL g) ds) '2008-04-08')) (= (. (TOK_TABLE_OR_COL m) ds) '2008-04-08')) (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL m) value))) (!= (. (TOK_TABLE_OR_COL m) value) '')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL f) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL g) value)))))
@@ -98,9 +102,9 @@
type: string
Needs Tagging: true
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 [f, m]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 [f, m]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08
Partition
base file name: ds=2008-04-08
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -113,13 +117,13 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/filter_join_breaktask
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/filter_join_breaktask
name filter_join_breaktask
partition_columns ds
serialization.ddl struct filter_join_breaktask { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268931012
+ transient_lastDdlTime 1269537197
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -130,13 +134,13 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/filter_join_breaktask
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/filter_join_breaktask
name filter_join_breaktask
partition_columns ds
serialization.ddl struct filter_join_breaktask { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268931012
+ transient_lastDdlTime 1269537197
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: filter_join_breaktask
name: filter_join_breaktask
@@ -167,7 +171,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-18_09-50-26_479_724779691836428247/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-20_617_658522301702881738/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -222,10 +226,10 @@
type: string
Needs Tagging: true
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-18_09-50-26_479_724779691836428247/10002 [$INTNAME]
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 [g]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-20_617_658522301702881738/10002 [$INTNAME]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 [g]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-18_09-50-26_479_724779691836428247/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-20_617_658522301702881738/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -241,7 +245,7 @@
columns _col0,_col4
columns.types int,string
escape.delim \
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08
Partition
base file name: ds=2008-04-08
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -254,13 +258,13 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/filter_join_breaktask
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/filter_join_breaktask
name filter_join_breaktask
partition_columns ds
serialization.ddl struct filter_join_breaktask { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268931012
+ transient_lastDdlTime 1269537197
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -271,13 +275,13 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/filter_join_breaktask
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/filter_join_breaktask
name filter_join_breaktask
partition_columns ds
serialization.ddl struct filter_join_breaktask { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268931012
+ transient_lastDdlTime 1269537197
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: filter_join_breaktask
name: filter_join_breaktask
@@ -300,7 +304,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-18_09-50-26_479_724779691836428247/10001
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-20_617_658522301702881738/10001
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -322,13 +326,15 @@
JOIN filter_join_breaktask g ON(g.value = m.value AND g.ds='2008-04-08' AND m.ds='2008-04-08' AND m.value is not null AND m.value !='')
PREHOOK: type: QUERY
PREHOOK: Input: default@filter_join_breaktask@ds=2008-04-08
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-18_09-50-27_453_7500070879794525924/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-21_285_398192576186129820/10000
POSTHOOK: query: SELECT f.key, g.value
FROM filter_join_breaktask f JOIN filter_join_breaktask m ON( f.key = m.key AND f.ds='2008-04-08' AND m.ds='2008-04-08' AND f.key is not null)
JOIN filter_join_breaktask g ON(g.value = m.value AND g.ds='2008-04-08' AND m.ds='2008-04-08' AND m.value is not null AND m.value !='')
POSTHOOK: type: QUERY
POSTHOOK: Input: default@filter_join_breaktask@ds=2008-04-08
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-18_09-50-27_453_7500070879794525924/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-21_285_398192576186129820/10000
+POSTHOOK: Lineage: filter_join_breaktask PARTITION(ds=2008-04-08).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: filter_join_breaktask PARTITION(ds=2008-04-08).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
146 val_146
150 val_150
213 val_213
@@ -346,3 +352,5 @@
POSTHOOK: query: DROP TABLE filter_join_breaktask
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@filter_join_breaktask
+POSTHOOK: Lineage: filter_join_breaktask PARTITION(ds=2008-04-08).value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: filter_join_breaktask PARTITION(ds=2008-04-08).key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/udf6.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf6.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/udf6.q.out (working copy)
@@ -11,12 +11,14 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
PREHOOK: query: EXPLAIN
SELECT IF(TRUE, 1, 2) FROM dest1
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT IF(TRUE, 1, 2) FROM dest1
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION IF TRUE 1 2)))))
@@ -51,11 +53,12 @@
PREHOOK: query: SELECT IF(TRUE, 1, 2) FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2131215543/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-32_006_2253460797492154188/10000
POSTHOOK: query: SELECT IF(TRUE, 1, 2) FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2131215543/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-32_006_2253460797492154188/10000
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
1
PREHOOK: query: EXPLAIN
SELECT IF(TRUE, 1, 2), IF(FALSE, 1, 2), IF(NULL, 1, 2), IF(TRUE, "a", "b"),
@@ -73,6 +76,7 @@
CAST(128 AS INT), CAST(1.0 AS DOUBLE), CAST('128' AS STRING) FROM dest1
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION IF TRUE 1 2)) (TOK_SELEXPR (TOK_FUNCTION IF FALSE 1 2)) (TOK_SELEXPR (TOK_FUNCTION IF TOK_NULL 1 2)) (TOK_SELEXPR (TOK_FUNCTION IF TRUE "a" "b")) (TOK_SELEXPR (TOK_FUNCTION IF TRUE 0.1 0.2)) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_BIGINT 1) (TOK_FUNCTION TOK_BIGINT 2))) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_TINYINT 127) (TOK_FUNCTION TOK_TINYINT 126))) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_SMALLINT 127) (TOK_FUNCTION TOK_SMALLINT 128))) (TOK_SELEXPR (TOK_FUNCTION TOK_INT 128)) (TOK_SELEXPR (TOK_FUNCTION TOK_DOUBLE 1.0)) (TOK_SELEXPR (TOK_FUNCTION TOK_STRING '128')))))
@@ -132,7 +136,7 @@
CAST('128' AS STRING) FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1758423880/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-35_114_4321388784000937302/10000
POSTHOOK: query: SELECT IF(TRUE, 1, 2), IF(FALSE, 1, 2), IF(NULL, 1, 2), IF(TRUE, "a", "b"),
IF(TRUE, 0.1, 0.2), IF(FALSE, CAST(1 AS BIGINT), CAST(2 AS BIGINT)),
IF(FALSE, CAST(127 AS TINYINT), CAST(126 AS TINYINT)),
@@ -141,5 +145,6 @@
CAST('128' AS STRING) FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1758423880/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-35_114_4321388784000937302/10000
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
1 2 2 a 0.1 2 126 128 128 1.0 128
Index: ql/src/test/results/clientpositive/join17.q.out
===================================================================
--- ql/src/test/results/clientpositive/join17.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/join17.q.out (working copy)
@@ -58,9 +58,9 @@
type: string
Needs Tagging: true
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src [src2, src1]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src [src2, src1]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
Partition
base file name: src
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -71,12 +71,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
name src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451083
+ transient_lastDdlTime 1269538187
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -87,12 +87,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
name src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451083
+ transient_lastDdlTime 1269538187
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: src
name: src
@@ -130,7 +130,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-58-03_764_1270783777250765973/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-29-48_072_6246987934394656672/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -141,12 +141,12 @@
columns.types int:string:int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key1, string value1, i32 key2, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451083
+ transient_lastDdlTime 1269538187
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
TotalFiles: 1
@@ -156,7 +156,7 @@
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-58-03_764_1270783777250765973/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-29-48_072_6246987934394656672/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -166,15 +166,15 @@
columns.types int:string:int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key1, string value1, i32 key2, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451083
+ transient_lastDdlTime 1269538187
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-58-03_764_1270783777250765973/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-29-48_072_6246987934394656672/10001
PREHOOK: query: FROM src src1
JOIN src src2 ON (src1.key = src2.key)
@@ -187,14 +187,22 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value2 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key2 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value1 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-58-08_934_3068558115984234182/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-29-51_709_6104892223123605958/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-58-08_934_3068558115984234182/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-29-51_709_6104892223123605958/10000
+POSTHOOK: Lineage: dest1.value2 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key2 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value1 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ]
0 val_0 0 val_0
0 val_0 0 val_0
0 val_0 0 val_0
Index: ql/src/test/results/clientpositive/udf_concat_insert2.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_concat_insert2.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/udf_concat_insert2.q.out (working copy)
@@ -13,14 +13,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/pyang/trunk/VENDOR.hive/trunk/build/ql/tmp/2058952662/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-13-15_449_1736087643892391112/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/pyang/trunk/VENDOR.hive/trunk/build/ql/tmp/2058952662/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-13-15_449_1736087643892391112/10000
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[]
1234abcextra argument val_86
1234abcextra argument val_27
1234abcextra argument val_98
@@ -110,3 +114,5 @@
POSTHOOK: query: DROP TABLE dest1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[]
Index: ql/src/test/results/clientpositive/groupby6.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby6.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby6.q.out (working copy)
@@ -58,7 +58,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1318366454/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-37_992_1911927733335616208/10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -111,14 +111,16 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1066105408/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-44_068_819911530815185334/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1066105408/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-44_068_819911530815185334/10000
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
0
1
2
Index: ql/src/test/results/clientpositive/create_insert_outputformat.q.out
===================================================================
--- ql/src/test/results/clientpositive/create_insert_outputformat.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/create_insert_outputformat.q.out (working copy)
@@ -21,10 +21,14 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@table_test_output_format
+POSTHOOK: Lineage: table_test_output_format.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: describe table_test_output_format
PREHOOK: type: DESCTABLE
POSTHOOK: query: describe table_test_output_format
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: table_test_output_format.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
key int
value string
PREHOOK: query: DROP TABLE table_test_output_format
@@ -32,10 +36,14 @@
POSTHOOK: query: DROP TABLE table_test_output_format
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@table_test_output_format
+POSTHOOK: Lineage: table_test_output_format.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP TABLE table_test_output_format_sequencefile
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE table_test_output_format_sequencefile
POSTHOOK: type: DROPTABLE
+POSTHOOK: Lineage: table_test_output_format.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: CREATE TABLE table_test_output_format_sequencefile(key INT, value STRING) STORED AS
INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat'
@@ -45,6 +53,8 @@
OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@table_test_output_format_sequencefile
+POSTHOOK: Lineage: table_test_output_format.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: FROM src
INSERT OVERWRITE TABLE table_test_output_format_sequencefile SELECT src.key, src.value LIMIT 10
PREHOOK: type: QUERY
@@ -55,10 +65,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@table_test_output_format_sequencefile
+POSTHOOK: Lineage: table_test_output_format_sequencefile.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format_sequencefile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: describe table_test_output_format_sequencefile
PREHOOK: type: DESCTABLE
POSTHOOK: query: describe table_test_output_format_sequencefile
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: table_test_output_format_sequencefile.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format_sequencefile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
key int
value string
PREHOOK: query: DROP TABLE table_test_output_format_sequencefile
@@ -66,10 +84,18 @@
POSTHOOK: query: DROP TABLE table_test_output_format_sequencefile
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@table_test_output_format_sequencefile
+POSTHOOK: Lineage: table_test_output_format_sequencefile.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format_sequencefile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP TABLE table_test_output_format_hivesequencefile
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE table_test_output_format_hivesequencefile
POSTHOOK: type: DROPTABLE
+POSTHOOK: Lineage: table_test_output_format_sequencefile.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format_sequencefile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: CREATE TABLE table_test_output_format_hivesequencefile(key INT, value STRING) STORED AS
INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
@@ -79,6 +105,10 @@
OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@table_test_output_format_hivesequencefile
+POSTHOOK: Lineage: table_test_output_format_sequencefile.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format_sequencefile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: FROM src
INSERT OVERWRITE TABLE table_test_output_format_hivesequencefile SELECT src.key, src.value LIMIT 10
PREHOOK: type: QUERY
@@ -89,10 +119,22 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@table_test_output_format_hivesequencefile
+POSTHOOK: Lineage: table_test_output_format_sequencefile.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format_sequencefile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format_hivesequencefile.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format_hivesequencefile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: describe table_test_output_format_hivesequencefile
PREHOOK: type: DESCTABLE
POSTHOOK: query: describe table_test_output_format_hivesequencefile
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: table_test_output_format_sequencefile.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format_sequencefile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format_hivesequencefile.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format_hivesequencefile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
key int
value string
PREHOOK: query: DROP TABLE table_test_output_format_hivesequencefile
@@ -100,3 +142,9 @@
POSTHOOK: query: DROP TABLE table_test_output_format_hivesequencefile
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@table_test_output_format_hivesequencefile
+POSTHOOK: Lineage: table_test_output_format_sequencefile.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format_sequencefile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format_hivesequencefile.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: table_test_output_format_hivesequencefile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/sample5.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample5.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/sample5.q.out (working copy)
@@ -50,7 +50,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-20_117_2190795569250642543/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-10_822_1768469916785075184/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -61,21 +61,21 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452780
+ transient_lastDdlTime 1269539530
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
TotalFiles: 1
MultiFileSpray: false
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket [s]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket [s]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket
Partition
base file name: srcbucket
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -87,12 +87,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket
name srcbucket
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452778
+ transient_lastDdlTime 1269539529
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -104,12 +104,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket
name srcbucket
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452778
+ transient_lastDdlTime 1269539529
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket
name: srcbucket
@@ -121,14 +121,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-20_117_2190795569250642543/10002
- destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-20_117_2190795569250642543/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-10_822_1768469916785075184/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-10_822_1768469916785075184/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-20_117_2190795569250642543/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-10_822_1768469916785075184/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -138,20 +138,20 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452780
+ transient_lastDdlTime 1269539530
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-20_117_2190795569250642543/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-10_822_1768469916785075184/10001
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-20_117_2190795569250642543/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-10_822_1768469916785075184/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -165,9 +165,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-20_117_2190795569250642543/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-20_117_2190795569250642543/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-10_822_1768469916785075184/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-10_822_1768469916785075184/10002]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-20_117_2190795569250642543/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-10_822_1768469916785075184/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -178,12 +178,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452780
+ transient_lastDdlTime 1269539530
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -194,12 +194,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452780
+ transient_lastDdlTime 1269539530
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
name: dest1
@@ -208,7 +208,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-20_117_2190795569250642543/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-10_822_1768469916785075184/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -219,12 +219,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452780
+ transient_lastDdlTime 1269539530
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
TotalFiles: 1
@@ -241,14 +241,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcbucket
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: SELECT dest1.* FROM dest1 SORT BY key, value
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-28_181_757472784611543801/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-17_170_8470881548510328028/10000
POSTHOOK: query: SELECT dest1.* FROM dest1 SORT BY key, value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-28_181_757472784611543801/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-17_170_8470881548510328028/10000
+POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ]
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/join26.q.out
===================================================================
--- ql/src/test/results/clientpositive/join26.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/join26.q.out (working copy)
@@ -83,7 +83,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-59-54_846_3050914850387307486/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-16_670_5004039786810027007/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -94,12 +94,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451194
+ transient_lastDdlTime 1269538276
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
@@ -153,7 +153,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-59-54_846_3050914850387307486/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-16_670_5004039786810027007/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -164,12 +164,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451194
+ transient_lastDdlTime 1269538276
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
@@ -213,7 +213,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-59-54_846_3050914850387307486/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-16_670_5004039786810027007/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -224,21 +224,21 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451194
+ transient_lastDdlTime 1269538276
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
MultiFileSpray: false
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -252,13 +252,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451192
+ transient_lastDdlTime 1269538274
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -269,13 +269,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451192
+ transient_lastDdlTime 1269538274
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
name: srcpart
@@ -287,14 +287,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-59-54_846_3050914850387307486/10002
- destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-59-54_846_3050914850387307486/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-16_670_5004039786810027007/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-16_670_5004039786810027007/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-59-54_846_3050914850387307486/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-16_670_5004039786810027007/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -304,20 +304,20 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451194
+ transient_lastDdlTime 1269538276
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-59-54_846_3050914850387307486/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-16_670_5004039786810027007/10001
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-59-54_846_3050914850387307486/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-16_670_5004039786810027007/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -333,9 +333,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-59-54_846_3050914850387307486/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-59-54_846_3050914850387307486/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-16_670_5004039786810027007/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-16_670_5004039786810027007/10002]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-59-54_846_3050914850387307486/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-16_670_5004039786810027007/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -346,12 +346,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451194
+ transient_lastDdlTime 1269538276
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -362,12 +362,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451194
+ transient_lastDdlTime 1269538276
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
name: dest_j1
@@ -376,7 +376,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-59-54_846_3050914850387307486/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-16_670_5004039786810027007/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -387,12 +387,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451194
+ transient_lastDdlTime 1269538276
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
@@ -417,14 +417,20 @@
POSTHOOK: Input: default@src
POSTHOOK: Input: default@src1
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: select * from dest_j1 x order by x.key
PREHOOK: type: QUERY
PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-59-59_878_7121143175378744148/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-20_875_6439714964995837563/10000
POSTHOOK: query: select * from dest_j1 x order by x.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-59-59_878_7121143175378744148/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-20_875_6439714964995837563/10000
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
128 val_128 val_128
128 val_128 val_128
128 val_128 val_128
@@ -537,3 +543,6 @@
POSTHOOK: query: drop table dest_j1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/groupby5_map.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby5_map.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby5_map.q.out (working copy)
@@ -85,12 +85,14 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.key UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/2135010409/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-24_302_4581025682099207327/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/2135010409/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-24_302_4581025682099207327/10000
+POSTHOOK: Lineage: dest1.key UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
130091
Index: ql/src/test/results/clientpositive/rcfile_default_format.q.out
===================================================================
--- ql/src/test/results/clientpositive/rcfile_default_format.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/rcfile_default_format.q.out (working copy)
@@ -9,7 +9,7 @@
POSTHOOK: type: DESCTABLE
key string from deserializer
-Detailed Table Information Table(tableName:rcfile_default_format, dbName:default, owner:heyongqiang, createTime:1264196677, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/test/data/warehouse/rcfile_default_format, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264196677}, viewOriginalText:null, viewExpandedText:null)
+Detailed Table Information Table(tableName:rcfile_default_format, dbName:default, owner:athusoo, createTime:1269539394, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/rcfile_default_format, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{transient_lastDdlTime=1269539394}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: CREATE TABLE rcfile_default_format_ctas AS SELECT key,value FROM src
PREHOOK: type: CREATETABLE
PREHOOK: Input: default@src
@@ -24,7 +24,7 @@
key string from deserializer
value string from deserializer
-Detailed Table Information Table(tableName:rcfile_default_format_ctas, dbName:default, owner:heyongqiang, createTime:1264196683, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/test/data/warehouse/rcfile_default_format_ctas, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264196683}, viewOriginalText:null, viewExpandedText:null)
+Detailed Table Information Table(tableName:rcfile_default_format_ctas, dbName:default, owner:athusoo, createTime:1269539397, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/rcfile_default_format_ctas, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269539397}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: CREATE TABLE rcfile_default_format_txtfile (key STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE TABLE rcfile_default_format_txtfile (key STRING) STORED AS TEXTFILE
@@ -38,13 +38,15 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@rcfile_default_format_txtfile
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DESCRIBE EXTENDED rcfile_default_format_txtfile
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE EXTENDED rcfile_default_format_txtfile
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
key string
-Detailed Table Information Table(tableName:rcfile_default_format_txtfile, dbName:default, owner:heyongqiang, createTime:1264196683, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/test/data/warehouse/rcfile_default_format_txtfile, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264196683}, viewOriginalText:null, viewExpandedText:null)
+Detailed Table Information Table(tableName:rcfile_default_format_txtfile, dbName:default, owner:athusoo, createTime:1269539397, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/rcfile_default_format_txtfile, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269539397}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: CREATE TABLE textfile_default_format_ctas AS SELECT key,value FROM rcfile_default_format_ctas
PREHOOK: type: CREATETABLE
PREHOOK: Input: default@rcfile_default_format_ctas
@@ -52,31 +54,37 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Input: default@rcfile_default_format_ctas
POSTHOOK: Output: default@textfile_default_format_ctas
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DESCRIBE EXTENDED textfile_default_format_ctas
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE EXTENDED textfile_default_format_ctas
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
key string
value string
-Detailed Table Information Table(tableName:textfile_default_format_ctas, dbName:default, owner:heyongqiang, createTime:1264196692, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/test/data/warehouse/textfile_default_format_ctas, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264196692}, viewOriginalText:null, viewExpandedText:null)
+Detailed Table Information Table(tableName:textfile_default_format_ctas, dbName:default, owner:athusoo, createTime:1269539403, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/textfile_default_format_ctas, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1269539403}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: DROP TABLE rcfile_default_format
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE rcfile_default_format
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@rcfile_default_format
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP TABLE rcfile_default_format_ctas
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE rcfile_default_format_ctas
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@rcfile_default_format_ctas
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP TABLE rcfile_default_format_txtfile
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE rcfile_default_format_txtfile
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@rcfile_default_format_txtfile
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP TABLE textfile_default_format_ctas
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE textfile_default_format_ctas
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@textfile_default_format_ctas
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/smb_mapjoin_7.q.out
===================================================================
--- ql/src/test/results/clientpositive/smb_mapjoin_7.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/smb_mapjoin_7.q.out (working copy)
@@ -63,6 +63,8 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@smb_bucket4_2
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: insert overwrite table smb_join_results_empty_bigtable
select /*+mapjoin(b)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key
PREHOOK: type: QUERY
@@ -75,6 +77,12 @@
POSTHOOK: Input: default@smb_bucket4_2
POSTHOOK: Input: default@smb_bucket4_1
POSTHOOK: Output: default@smb_join_results_empty_bigtable
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: insert overwrite table smb_join_results_empty_bigtable
select /*+mapjoin(b)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key
PREHOOK: type: QUERY
@@ -87,14 +95,34 @@
POSTHOOK: Input: default@smb_bucket4_2
POSTHOOK: Input: default@smb_bucket4_1
POSTHOOK: Output: default@smb_join_results_empty_bigtable
+POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage:
smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: select * from smb_join_results_empty_bigtable order by k1 PREHOOK: type: QUERY PREHOOK: Input: default@smb_join_results_empty_bigtable -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_17-43-07_414_9075895455978344401/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-08-01_950_6635840159086775676/10000 POSTHOOK: query: select * from smb_join_results_empty_bigtable order by k1 POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_join_results_empty_bigtable -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_17-43-07_414_9075895455978344401/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-08-01_950_6635840159086775676/10000 +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] NULL NULL 0 val_0 NULL NULL 0 val_0 NULL NULL 0 val_0 @@ -603,6 +631,16 @@ insert overwrite table smb_join_results select /*+mapjoin(a)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE 
null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_FULLOUTERJOIN (TOK_TABREF smb_bucket4_1 a) (TOK_TABREF smb_bucket4_2 b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB smb_join_results)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a))) (TOK_SELEXPR TOK_ALLCOLREF)))) @@ -683,14 +721,42 @@ POSTHOOK: Input: default@smb_bucket4_2 POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@smb_join_results +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from smb_join_results order by k1 PREHOOK: type: QUERY PREHOOK: Input: default@smb_join_results -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_17-43-20_724_8972446774229078097/10000 +PREHOOK: Output: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-08-08_502_4647343407820696652/10000 POSTHOOK: query: select * from smb_join_results order by k1 POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_join_results -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_17-43-20_724_8972446774229078097/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-08-08_502_4647343407820696652/10000 +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] NULL NULL 0 val_0 NULL NULL 0 val_0 NULL NULL 0 val_0 @@ -1201,55 +1267,217 @@ POSTHOOK: Input: default@smb_bucket4_2 POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@normal_join_results +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: 
Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from normal_join_results PREHOOK: type: QUERY PREHOOK: Input: default@normal_join_results -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_17-43-32_487_8597548153798727757/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-08-15_777_2631361411076459642/10000 POSTHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from normal_join_results POSTHOOK: type: QUERY POSTHOOK: Input: default@normal_join_results -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_17-43-32_487_8597548153798727757/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-08-15_777_2631361411076459642/10000 +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] 
+POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] 0 130091 0 36210398070 PREHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results PREHOOK: type: QUERY PREHOOK: Input: default@smb_join_results -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_17-43-38_536_6150065360796623962/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-08-19_002_3877607819666148009/10000 POSTHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_join_results -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_17-43-38_536_6150065360796623962/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-08-19_002_3877607819666148009/10000 +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, 
comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] 0 130091 0 36210398070 PREHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results_empty_bigtable PREHOOK: type: QUERY PREHOOK: Input: default@smb_join_results_empty_bigtable -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_17-43-42_746_6078097413289235679/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-08-22_340_2906649262007743023/10000 POSTHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results_empty_bigtable POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_join_results_empty_bigtable -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_17-43-42_746_6078097413289235679/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-08-22_340_2906649262007743023/10000 +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE 
null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] 0 130091 0 36210398070 PREHOOK: query: drop table smb_join_results PREHOOK: type: DROPTABLE POSTHOOK: query: drop table smb_join_results POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@smb_join_results +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: drop table smb_join_results_empty_bigtable PREHOOK: type: DROPTABLE POSTHOOK: query: drop table smb_join_results_empty_bigtable POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@smb_join_results_empty_bigtable +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, 
type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: drop table normal_join_results PREHOOK: type: DROPTABLE POSTHOOK: query: drop table normal_join_results POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@normal_join_results +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: 
smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: drop table smb_bucket4_1 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table smb_bucket4_1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@smb_bucket4_1 +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: drop table smb_bucket4_2 PREHOOK: type: 
DROPTABLE POSTHOOK: query: drop table smb_bucket4_2 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@smb_bucket4_2 +POSTHOOK: Lineage: smb_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: normal_join_results.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: normal_join_results.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v1 SIMPLE null[(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.k2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: smb_join_results_empty_bigtable.v2 SIMPLE null[(smb_bucket4_2)b.FieldSchema(name:value, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/skewjoin.q.out =================================================================== --- ql/src/test/results/clientpositive/skewjoin.q.out (revision 927279) +++ ql/src/test/results/clientpositive/skewjoin.q.out (working copy) @@ -251,14 +251,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest_j1 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT sum(hash(key)), sum(hash(value)) FROM dest_j1 PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-39-57_324_3071297753179667524/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-58-10_514_557734090461583395/10000 POSTHOOK: query: SELECT sum(hash(key)), sum(hash(value)) FROM dest_j1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-39-57_324_3071297753179667524/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-58-10_514_557734090461583395/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 278697 101852390308 PREHOOK: query: EXPLAIN SELECT /*+ STREAMTABLE(a) */ * @@ -272,6 +276,8 @@ JOIN T3 c ON b.key = c.key JOIN T4 d ON c.key = d.key POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_TABREF T1 a) (TOK_TABREF T2 b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF T3 c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key))) (TOK_TABREF T4 d) (= (. (TOK_TABLE_OR_COL c) key) (. (TOK_TABLE_OR_COL d) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_STREAMTABLE (TOK_HINTARGLIST a))) (TOK_SELEXPR TOK_ALLCOLREF)))) @@ -404,7 +410,7 @@ PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 PREHOOK: Input: default@t1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-40-18_526_8099848720278828714/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-58-39_632_7807430882498367428/10000 POSTHOOK: query: SELECT /*+ STREAMTABLE(a) */ * FROM T1 a JOIN T2 b ON a.key = b.key JOIN T3 c ON b.key = c.key @@ -414,7 +420,9 @@ POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-40-18_526_8099848720278828714/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-58-39_632_7807430882498367428/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 2 12 2 22 2 12 2 12 PREHOOK: query: EXPLAIN SELECT /*+ STREAMTABLE(a,c) */ * @@ -428,6 +436,8 @@ JOIN T3 c ON b.key = c.key JOIN T4 d ON c.key = d.key POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_TABREF T1 a) (TOK_TABREF T2 b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF T3 c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key))) (TOK_TABREF T4 d) (= (. (TOK_TABLE_OR_COL c) key) (. 
(TOK_TABLE_OR_COL d) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_STREAMTABLE (TOK_HINTARGLIST a c))) (TOK_SELEXPR TOK_ALLCOLREF)))) @@ -560,7 +570,7 @@ PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 PREHOOK: Input: default@t1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-40-23_734_4785359328736541630/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-58-44_630_2960280964455841083/10000 POSTHOOK: query: SELECT /*+ STREAMTABLE(a,c) */ * FROM T1 a JOIN T2 b ON a.key = b.key JOIN T3 c ON b.key = c.key @@ -570,12 +580,16 @@ POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-40-23_734_4785359328736541630/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-58-44_630_2960280964455841083/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 2 12 2 22 2 12 2 12 PREHOOK: query: EXPLAIN FROM T1 a JOIN src c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN FROM T1 a JOIN src c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF T1 a) (TOK_TABREF src c) (= (+ (. (TOK_TABLE_OR_COL c) key) 1) (. (TOK_TABLE_OR_COL a) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_STREAMTABLE (TOK_HINTARGLIST a))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. (TOK_TABLE_OR_COL a) key)))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. (TOK_TABLE_OR_COL a) val)))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. 
(TOK_TABLE_OR_COL c) key))))))) @@ -656,7 +670,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-40-29_021_8708656238087823514/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-58-49_239_5604491549703326966/10002 Reduce Output Operator sort order: tag: -1 @@ -701,12 +715,14 @@ PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Input: default@t1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-40-29_087_1046879883912334112/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-58-49_685_1276897217017372674/10000 POSTHOOK: query: FROM T1 a JOIN src c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-40-29_087_1046879883912334112/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-58-49_685_1276897217017372674/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 198 6274 194 PREHOOK: query: EXPLAIN FROM (SELECT src.* FROM src) x @@ -722,6 +738,8 @@ ON (x.key = Y.key) SELECT sum(hash(Y.key)), sum(hash(Y.value)) POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))))) x) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))))) Y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL Y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. (TOK_TABLE_OR_COL Y) key)))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. 
(TOK_TABLE_OR_COL Y) value))))))) @@ -890,7 +908,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-40-37_328_8353462754003839474/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-58-55_989_8130931921687143840/10002 Reduce Output Operator sort order: tag: -1 @@ -934,7 +952,7 @@ SELECT sum(hash(Y.key)), sum(hash(Y.value)) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-40-37_530_5490496591134819475/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-58-56_669_748292354372716211/10000 POSTHOOK: query: FROM (SELECT src.* FROM src) x JOIN @@ -943,7 +961,9 @@ SELECT sum(hash(Y.key)), sum(hash(Y.value)) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-40-37_530_5490496591134819475/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-58-56_669_748292354372716211/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 44481300 101852390308 PREHOOK: query: EXPLAIN FROM (SELECT src.* FROM src) x @@ -959,6 +979,8 @@ ON (x.key = Y.key and substring(x.value, 5)=substring(y.value, 5)+1) SELECT sum(hash(Y.key)), sum(hash(Y.value)) POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))))) x) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))))) Y) (and (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL Y) key)) (= (TOK_FUNCTION substring (. (TOK_TABLE_OR_COL x) value) 5) (+ (TOK_FUNCTION substring (. (TOK_TABLE_OR_COL y) value) 5) 1))))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. (TOK_TABLE_OR_COL Y) key)))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. 
(TOK_TABLE_OR_COL Y) value))))))) @@ -1137,7 +1159,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-42-06_875_657056406676769744/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-00-27_464_7630771677109054316/10002 Reduce Output Operator sort order: tag: -1 @@ -1181,7 +1203,7 @@ SELECT sum(hash(Y.key)), sum(hash(Y.value)) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-42-07_486_5787001889489498724/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-00-28_154_8264366127572015288/10000 POSTHOOK: query: FROM (SELECT src.* FROM src) x JOIN @@ -1190,7 +1212,9 @@ SELECT sum(hash(Y.key)), sum(hash(Y.value)) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-42-07_486_5787001889489498724/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-00-28_154_8264366127572015288/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] NULL NULL PREHOOK: query: EXPLAIN SELECT sum(hash(src1.c1)), sum(hash(src2.c4)) @@ -1214,6 +1238,8 @@ (SELECT src.key as c5, src.value as c6 from src) src3 ON src1.c1 = src3.c5 AND src3.c5 < 80 POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c2)))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)))) src2) (AND (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)) (< (. (TOK_TABLE_OR_COL src1) c1) 100))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c5) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c6)))) src3) (AND (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src3) c5)) (< (. (TOK_TABLE_OR_COL src3) c5) 80)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. (TOK_TABLE_OR_COL src1) c1)))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. 
(TOK_TABLE_OR_COL src2) c4))))))) @@ -1464,7 +1490,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-42-43_139_341726508136962928/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-01-16_779_1521956566985868789/10002 Reduce Output Operator sort order: tag: -1 @@ -1633,7 +1659,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-42-43_572_1869488555235237678/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-01-17_956_6369185203227238833/10000 POSTHOOK: query: SELECT sum(hash(src1.c1)), sum(hash(src2.c4)) FROM (SELECT src.key as c1, src.value as c2 from src) src1 @@ -1645,7 +1671,9 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-42-43_572_1869488555235237678/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-01-17_956_6369185203227238833/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 293143 -136853010385 PREHOOK: query: EXPLAIN SELECT /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) FROM T1 k LEFT OUTER JOIN T1 v ON k.key+1=v.key @@ -1653,6 +1681,8 @@ POSTHOOK: query: EXPLAIN SELECT /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) FROM T1 k LEFT OUTER JOIN T1 v ON k.key+1=v.key POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF T1 k) (TOK_TABREF T1 v) (= (+ (. (TOK_TABLE_OR_COL k) key) 1) (. (TOK_TABLE_OR_COL v) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST v))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. (TOK_TABLE_OR_COL k) key)))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. 
(TOK_TABLE_OR_COL v) val))))))) @@ -1718,7 +1748,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-43-25_195_82127211186947204/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-13_975_924947098799253930/10002 Select Operator expressions: expr: _col0 @@ -1778,134 +1808,166 @@ PREHOOK: query: SELECT /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) FROM T1 k LEFT OUTER JOIN T1 v ON k.key+1=v.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-43-25_264_8428740047419886929/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-14_350_2682958342670310619/10000 POSTHOOK: query: SELECT /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) FROM T1 k LEFT OUTER JOIN T1 v ON k.key+1=v.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-43-25_264_8428740047419886929/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-14_350_2682958342670310619/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 372 6320 PREHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.val PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-43-32_938_2032638800596167830/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-20_545_3595442308324067773/10000 POSTHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.val POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-43-32_938_2032638800596167830/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-20_545_3595442308324067773/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] NULL NULL PREHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-43-40_641_1825449334973878743/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-26_703_7677620771848382130/10000 POSTHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-43-40_641_1825449334973878743/10000 +POSTHOOK: Output: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-26_703_7677620771848382130/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 429 12643 PREHOOK: query: select sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-43-48_833_5293250651977115318/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-32_734_6463141400357497439/10000 POSTHOOK: query: select sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-43-48_833_5293250651977115318/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-32_734_6463141400357497439/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 429 12643 PREHOOK: query: select count(1) from T1 a join T1 b on a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-44-01_591_1464928610432503675/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-41_942_8379255960001049153/10000 POSTHOOK: query: select count(1) from T1 a join T1 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-44-01_591_1464928610432503675/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-41_942_8379255960001049153/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 8 PREHOOK: query: FROM T1 a LEFT OUTER JOIN T2 c ON c.key+1=a.key SELECT sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) PREHOOK: type: QUERY PREHOOK: Input: default@t2 PREHOOK: Input: default@t1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-44-13_585_7159832475045196069/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-50_976_7652718712596823230/10000 POSTHOOK: query: FROM T1 a LEFT OUTER JOIN T2 c ON c.key+1=a.key SELECT sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) POSTHOOK: type: QUERY POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-44-13_585_7159832475045196069/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-50_976_7652718712596823230/10000 +POSTHOOK: Lineage: dest_j1.value 
SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 317 9462 50 PREHOOK: query: FROM T1 a RIGHT OUTER JOIN T2 c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) PREHOOK: type: QUERY PREHOOK: Input: default@t2 PREHOOK: Input: default@t1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-44-22_759_9104803007678785533/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-57_217_2739998458153710556/10000 POSTHOOK: query: FROM T1 a RIGHT OUTER JOIN T2 c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) POSTHOOK: type: QUERY POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-44-22_759_9104803007678785533/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-02-57_217_2739998458153710556/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 51 1570 318 PREHOOK: query: FROM T1 a FULL OUTER JOIN T2 c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) PREHOOK: type: QUERY PREHOOK: Input: default@t2 PREHOOK: Input: default@t1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-44-30_661_1739397497215155845/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-03-03_400_5678048695188371899/10000 POSTHOOK: query: FROM T1 a FULL OUTER JOIN T2 c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) POSTHOOK: type: QUERY POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-44-30_661_1739397497215155845/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-03-03_400_5678048695188371899/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 317 9462 318 PREHOOK: query: SELECT sum(hash(src1.key)), sum(hash(src1.val)), sum(hash(src2.key)) FROM T1 src1 LEFT OUTER JOIN T2 src2 ON src1.key+1 = src2.key RIGHT OUTER JOIN T2 src3 ON src2.key = src3.key PREHOOK: type: QUERY PREHOOK: Input: default@t2 PREHOOK: Input: default@t1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-44-38_409_1485725015115349891/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-03-09_605_6798367463475431909/10000 POSTHOOK: query: SELECT sum(hash(src1.key)), sum(hash(src1.val)), sum(hash(src2.key)) FROM T1 src1 LEFT OUTER JOIN T2 src2 ON src1.key+1 = src2.key RIGHT OUTER JOIN T2 src3 ON src2.key = src3.key POSTHOOK: type: QUERY POSTHOOK: Input: 
default@t2 POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-44-38_409_1485725015115349891/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-03-09_605_6798367463475431909/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 370 11003 377 PREHOOK: query: SELECT sum(hash(src1.key)), sum(hash(src1.val)), sum(hash(src2.key)) FROM T1 src1 JOIN T2 src2 ON src1.key+1 = src2.key JOIN T2 src3 ON src2.key = src3.key PREHOOK: type: QUERY PREHOOK: Input: default@t2 PREHOOK: Input: default@t1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-44-47_406_6458878623944968772/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-03-15_918_1277579659654246919/10000 POSTHOOK: query: SELECT sum(hash(src1.key)), sum(hash(src1.val)), sum(hash(src2.key)) FROM T1 src1 JOIN T2 src2 ON src1.key+1 = src2.key JOIN T2 src3 ON src2.key = src3.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-44-47_406_6458878623944968772/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-03-15_918_1277579659654246919/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 370 11003 377 PREHOOK: query: select /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k left outer join T1 v on k.key+1=v.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-45-06_018_2671345290627071148/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-03-28_785_1265015722573625605/10000 POSTHOOK: query: select /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k left outer join T1 v on k.key+1=v.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-45-06_018_2671345290627071148/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-03-28_785_1265015722573625605/10000 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 372 6320 PREHOOK: query: DROP TABLE dest_j1 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE dest_j1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest_j1 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: DROP TABLE T1 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE T1 POSTHOOK: type: 
DROPTABLE POSTHOOK: Output: default@t1 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: DROP TABLE T2 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE T2 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@t2 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: DROP TABLE T3 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE T3 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@t3 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: DROP TABLE T4 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE T4 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@t4 +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/udf1.q.out =================================================================== --- ql/src/test/results/clientpositive/udf1.q.out (revision 927279) +++ ql/src/test/results/clientpositive/udf1.q.out (working copy) @@ -113,7 +113,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1203564635/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-09-46_809_980271755669318206/10000 Stage: Stage-0 Move Operator @@ -128,7 +128,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/239786264/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-09-46_809_980271755669318206/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -208,12 +208,52 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c20 SIMPLE null[] +POSTHOOK: Lineage: dest1.c19 SIMPLE null[] +POSTHOOK: Lineage: dest1.c18 SIMPLE null[] +POSTHOOK: Lineage: dest1.c17 SIMPLE null[] +POSTHOOK: Lineage: dest1.c16 SIMPLE null[] +POSTHOOK: Lineage: dest1.c15 SIMPLE null[] +POSTHOOK: Lineage: dest1.c14 SIMPLE null[] +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] +POSTHOOK: Lineage: dest1.c2 SIMPLE null[] +POSTHOOK: Lineage: dest1.c3 SIMPLE null[] +POSTHOOK: Lineage: dest1.c4 SIMPLE null[] +POSTHOOK: Lineage: dest1.c5 SIMPLE null[] +POSTHOOK: Lineage: dest1.c6 SIMPLE null[] +POSTHOOK: Lineage: dest1.c7 SIMPLE null[] +POSTHOOK: Lineage: dest1.c8 SIMPLE null[] +POSTHOOK: Lineage: dest1.c9 SIMPLE null[] +POSTHOOK: Lineage: dest1.c10 SIMPLE null[] +POSTHOOK: Lineage: dest1.c11 SIMPLE null[] +POSTHOOK: Lineage: dest1.c12 SIMPLE null[] +POSTHOOK: Lineage: dest1.c13 SIMPLE null[] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1255942668/10000 +PREHOOK: Output: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-09-51_054_5382830430043383291/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1255942668/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-09-51_054_5382830430043383291/10000 +POSTHOOK: Lineage: dest1.c20 SIMPLE null[] +POSTHOOK: Lineage: dest1.c19 SIMPLE null[] +POSTHOOK: Lineage: dest1.c18 SIMPLE null[] +POSTHOOK: Lineage: dest1.c17 SIMPLE null[] +POSTHOOK: Lineage: dest1.c16 SIMPLE null[] +POSTHOOK: Lineage: dest1.c15 SIMPLE null[] +POSTHOOK: Lineage: dest1.c14 SIMPLE null[] +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] +POSTHOOK: Lineage: dest1.c2 SIMPLE null[] +POSTHOOK: Lineage: dest1.c3 SIMPLE null[] +POSTHOOK: Lineage: dest1.c4 SIMPLE null[] +POSTHOOK: Lineage: dest1.c5 SIMPLE null[] +POSTHOOK: Lineage: dest1.c6 SIMPLE null[] +POSTHOOK: Lineage: dest1.c7 SIMPLE null[] +POSTHOOK: Lineage: dest1.c8 SIMPLE null[] +POSTHOOK: Lineage: dest1.c9 SIMPLE null[] +POSTHOOK: Lineage: dest1.c10 SIMPLE null[] +POSTHOOK: Lineage: dest1.c11 SIMPLE null[] +POSTHOOK: Lineage: dest1.c12 SIMPLE null[] +POSTHOOK: Lineage: dest1.c13 SIMPLE null[] true false true true true false false false true true false true true acc abc abb hive hadoop AaAbAcA false Index: ql/src/test/results/clientpositive/input17.q.out =================================================================== --- ql/src/test/results/clientpositive/input17.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input17.q.out (working copy) @@ -117,14 +117,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src_thrift POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value SCRIPT null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.key SCRIPT null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/1351757908/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-21-01_499_4193843283100401812/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/1351757908/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-21-01_499_4193843283100401812/10000 +POSTHOOK: Lineage: dest1.value SCRIPT null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.key SCRIPT null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from
deserializer), (src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ] NULL null -1461153966 {"myint":49,"mystring":"343","underscore_int":7} -1952710705 {"myint":25,"mystring":"125","underscore_int":5} Index: ql/src/test/results/clientpositive/join35.q.out =================================================================== --- ql/src/test/results/clientpositive/join35.q.out (revision 927279) +++ ql/src/test/results/clientpositive/join35.q.out (working copy) @@ -84,9 +84,9 @@ type: bigint Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src [null-subquery1:subq1-subquery1:x] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src [null-subquery1:subq1-subquery1:x] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -97,12 +97,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451351 + transient_lastDdlTime 1269538390 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -113,12 +113,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451351 + transient_lastDdlTime 1269538390 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src @@ -142,7 +142,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -157,7 +157,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10002 Union Common Join Operator condition map: @@ -201,7 +201,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10003 + directory:
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10003 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -212,17 +212,17 @@ columns.types string:string:int file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451352 + transient_lastDdlTime 1269538390 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10004 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10004 Union Common Join Operator condition map: @@ -266,7 +266,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10003 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10003 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -277,12 +277,12 @@ columns.types string:string:int file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451352 + transient_lastDdlTime 1269538390 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -339,7 +339,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10003 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10003 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -350,22 +350,22 @@ columns.types string:string:int file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451352 + transient_lastDdlTime 1269538390 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false Needs 
Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10002] - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10004 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10004] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10002] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10004 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10004] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10002 Partition base file name: 10002 input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -381,7 +381,7 @@ columns _col0,_col1 columns.types string,bigint escape.delim \ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10004 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10004 Partition base file name: 10004 input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -405,14 +405,14 @@ Move Operator files: hdfs directory: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10003 - destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10003 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10000 Stage: Stage-0 Move Operator tables: replace: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -422,20 +422,20 @@ columns.types string:string:int file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451352 + transient_lastDdlTime 1269538390 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - 
tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10001 Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10003 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10003 Reduce Output Operator sort order: Map-reduce partition columns: @@ -451,9 +451,9 @@ type: int Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10003 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10003] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10003 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10003] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10003 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10003 Partition base file name: 10003 input format: org.apache.hadoop.mapred.TextInputFormat @@ -464,12 +464,12 @@ columns.types string:string:int file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451352 + transient_lastDdlTime 1269538390 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -480,12 +480,12 @@ columns.types string:string:int file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451352 + transient_lastDdlTime 1269538390 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -494,7 +494,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10000 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -505,12 +505,12 @@ columns.types string:string:int file.inputformat org.apache.hadoop.mapred.TextInputFormat 
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451352 + transient_lastDdlTime 1269538390 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -560,9 +560,9 @@ type: bigint Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src [null-subquery2:subq1-subquery2:x1] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src [null-subquery2:subq1-subquery2:x1] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -573,12 +573,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451351 + transient_lastDdlTime 1269538390 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -589,12 +589,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451351 + transient_lastDdlTime 1269538390 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src @@ -618,7 +618,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-32_871_8639228391120573230/10004 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-10_586_5938049575662968935/10004 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -655,14 +655,20 @@ POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 POSTHOOK: Output: default@dest_j1 +POSTHOOK: Lineage: dest_j1.val2 SET null[(src)x.null, (src)x1.null, ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: 
file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-50_239_3683729720966309884/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-24_504_8439629308217675396/10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-02-50_239_3683729720966309884/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-24_504_8439629308217675396/10000 +POSTHOOK: Lineage: dest_j1.val2 SET null[(src)x.null, (src)x1.null, ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] 128 3 146 val_146 2 150 val_150 1 @@ -681,3 +687,6 @@ POSTHOOK: query: drop table dest_j1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest_j1 +POSTHOOK: Lineage: dest_j1.val2 SET null[(src)x.null, (src)x1.null, ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/groupby1.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby1.q.out (revision 927279) +++ ql/src/test/results/clientpositive/groupby1.q.out (working copy) @@ -63,7 +63,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - invalidscheme:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1515720962/10002 + invalidscheme:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-30_539_246727942447060642/10002 Reduce Output Operator key expressions: expr: _col0 @@ -128,14 +128,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest_g1 +POSTHOOK: Lineage: dest_g1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_g1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT dest_g1.* FROM dest_g1 PREHOOK: type: QUERY PREHOOK: Input: default@dest_g1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1461207051/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-36_863_5208601874684351512/10000 POSTHOOK: query: SELECT dest_g1.* FROM dest_g1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_g1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1461207051/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-36_863_5208601874684351512/10000 +POSTHOOK: Lineage: dest_g1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_g1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/bucketmapjoin2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin2.q.out (revision 927279) +++ ql/src/test/results/clientpositive/bucketmapjoin2.q.out (working copy) @@ 
-154,7 +154,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-44_937_1252971228120031368/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-26_757_7171657674441895065/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -165,12 +165,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348864 + transient_lastDdlTime 1269536606 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -228,7 +228,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-44_937_1252971228120031368/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-26_757_7171657674441895065/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -239,12 +239,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348864 + transient_lastDdlTime 1269536606 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -253,12 +253,12 @@ Alias Bucket Base File Name Mapping: b {srcbucket20.txt=[srcbucket22.txt], srcbucket21.txt=[srcbucket23.txt]} Alias Bucket File Name Mapping: - b {file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]} + b {file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]} Needs Tagging: false Path -> Alias: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin [a] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin [a] Path -> Partition: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -270,12 +270,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348863 + transient_lastDdlTime 1269536605 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -287,12 +287,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348863 + transient_lastDdlTime 1269536605 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -304,14 +304,14 @@ Move Operator files: hdfs directory: true - source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-44_937_1252971228120031368/10002 - destination: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-44_937_1252971228120031368/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-26_757_7171657674441895065/10002 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-26_757_7171657674441895065/10000 Stage: Stage-0 Move Operator tables: replace: true - source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-44_937_1252971228120031368/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-26_757_7171657674441895065/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -321,20 +321,20 @@ columns.types string:string:string 
file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348864 + transient_lastDdlTime 1269536606 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-44_937_1252971228120031368/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-26_757_7171657674441895065/10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-44_937_1252971228120031368/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-26_757_7171657674441895065/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -350,9 +350,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-44_937_1252971228120031368/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-44_937_1252971228120031368/10002] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-26_757_7171657674441895065/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-26_757_7171657674441895065/10002] Path -> Partition: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-44_937_1252971228120031368/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-26_757_7171657674441895065/10002 Partition base file name: 10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -363,12 +363,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348864 + transient_lastDdlTime 1269536606 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -379,12 +379,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348864
+ transient_lastDdlTime 1269536606
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
name: bucketmapjoin_tmp_result
@@ -393,7 +393,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-44_937_1252971228120031368/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-26_757_7171657674441895065/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -404,12 +404,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348864
+ transient_lastDdlTime 1269536606
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -432,14 +432,20 @@
POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08
POSTHOOK: Input: default@srcbucket_mapjoin
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-53_282_7600912700619071025/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-33_377_4969208701787775650/10000
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-07-53_282_7600912700619071025/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-33_377_4969208701787775650/10000
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
0
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_1
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
@@ -451,6 +457,12 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
POSTHOOK: Output: default@bucketmapjoin_hash_result_1
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result
select /*+mapjoin(b)*/ a.key, a.value, b.value
from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b
@@ -467,14 +479,32 @@
POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08
POSTHOOK: Input: default@srcbucket_mapjoin
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-09_508_5485100783825645091/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-45_550_6535849010357572939/10000
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-09_508_5485100783825645091/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-45_550_6535849010357572939/10000
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
0
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_2
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
@@ -486,20 +516,44 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
POSTHOOK: Output: default@bucketmapjoin_hash_result_2
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2
from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b
on a.key = b.key
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_hash_result_2
PREHOOK: Input: default@bucketmapjoin_hash_result_1
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-17_606_4918360383772438343/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-51_725_6245906963680883317/10000
POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2
from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b
on a.key = b.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_hash_result_2
POSTHOOK: Input: default@bucketmapjoin_hash_result_1
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-17_606_4918360383772438343/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-51_725_6245906963680883317/10000
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
NULL	NULL	NULL
PREHOOK: query: explain extended insert overwrite table bucketmapjoin_tmp_result
@@ -513,6 +567,18 @@
from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b
on a.key=b.key and b.ds="2008-04-08"
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF srcbucket_mapjoin a) (TOK_TABREF srcbucket_mapjoin_part_2 b) (and (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (= (. (TOK_TABLE_OR_COL b) ds) "2008-04-08")))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB bucketmapjoin_tmp_result)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value)))))
@@ -573,7 +639,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-21_630_2117412984874359610/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-54_976_1737258721837984440/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -584,12 +650,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348864
+ transient_lastDdlTime 1269536606
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -637,7 +703,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-21_630_2117412984874359610/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-54_976_1737258721837984440/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -648,12 +714,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348864
+ transient_lastDdlTime 1269536606
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -662,12 +728,12 @@
Alias Bucket Base File Name Mapping:
a {srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt]}
Alias Bucket File Name Mapping:
- a {file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]}
+ a {file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]}
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08
Partition
base file name: ds=2008-04-08
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -681,13 +747,13 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
name srcbucket_mapjoin_part_2
partition_columns ds
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348864
+ transient_lastDdlTime 1269536606
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -699,13 +765,13 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
name srcbucket_mapjoin_part_2
partition_columns ds
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348864
+ transient_lastDdlTime 1269536606
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket_mapjoin_part_2
name: srcbucket_mapjoin_part_2
@@ -717,14 +783,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-21_630_2117412984874359610/10002
- destination: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-21_630_2117412984874359610/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-54_976_1737258721837984440/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-54_976_1737258721837984440/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-21_630_2117412984874359610/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-54_976_1737258721837984440/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -734,20 +800,20 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348864
+ transient_lastDdlTime 1269536606
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
- tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-21_630_2117412984874359610/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-54_976_1737258721837984440/10001
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-21_630_2117412984874359610/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-54_976_1737258721837984440/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -763,9 +829,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-21_630_2117412984874359610/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-21_630_2117412984874359610/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-54_976_1737258721837984440/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-54_976_1737258721837984440/10002]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-21_630_2117412984874359610/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-54_976_1737258721837984440/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -776,12 +842,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348864
+ transient_lastDdlTime 1269536606
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -792,12 +858,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348864
+ transient_lastDdlTime 1269536606
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
name: bucketmapjoin_tmp_result
@@ -806,7 +872,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-21_630_2117412984874359610/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-03-54_976_1737258721837984440/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -817,12 +883,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348864
+ transient_lastDdlTime 1269536606
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -845,14 +911,44 @@
POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08
POSTHOOK: Input: default@srcbucket_mapjoin
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-29_750_208622254935871953/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-01_634_8805888679507925136/10000
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-29_750_208622254935871953/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-01_634_8805888679507925136/10000
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
0
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_1
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
@@ -864,6 +960,24 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
POSTHOOK: Output: default@bucketmapjoin_hash_result_1
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result
select /*+mapjoin(a)*/ a.key, a.value, b.value
from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b
@@ -880,14 +994,56 @@
POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08
POSTHOOK: Input: default@srcbucket_mapjoin
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-55_601_5325555544277093581/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-13_950_5984975106804110099/10000
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-08-55_601_5325555544277093581/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-13_950_5984975106804110099/10000
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
0
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_2
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
@@ -899,48 +1055,240 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
POSTHOOK: Output: default@bucketmapjoin_hash_result_2
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2
from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b
on a.key = b.key
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_hash_result_2
PREHOOK: Input: default@bucketmapjoin_hash_result_1
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-08_351_4983436337832635001/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-20_295_4102326631428870245/10000
POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2
from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b
on a.key = b.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_hash_result_2
POSTHOOK: Input: default@bucketmapjoin_hash_result_1
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-08_351_4983436337832635001/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-20_295_4102326631428870245/10000
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
NULL	NULL	NULL
PREHOOK: query: drop table bucketmapjoin_hash_result_2
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table bucketmapjoin_hash_result_2
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucketmapjoin_hash_result_2
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: drop table bucketmapjoin_hash_result_1
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table bucketmapjoin_hash_result_1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucketmapjoin_hash_result_1
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: drop table bucketmapjoin_tmp_result
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table bucketmapjoin_tmp_result
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: drop table srcbucket_mapjoin
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table srcbucket_mapjoin
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@srcbucket_mapjoin
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: drop table srcbucket_mapjoin_part
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table srcbucket_mapjoin_part
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF
null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] PREHOOK: query: drop table srcbucket_mapjoin_part_2 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table srcbucket_mapjoin_part_2 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@srcbucket_mapjoin_part_2 +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part_2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF 
null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/udf_concat_ws.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_concat_ws.q.out (revision 927279) +++ ql/src/test/results/clientpositive/udf_concat_ws.q.out (working copy) @@ -24,6 +24,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c3 SIMPLE null[] +POSTHOOK: Lineage: dest1.c2 SIMPLE null[] +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] PREHOOK: query: EXPLAIN SELECT concat_ws(dest1.c1, dest1.c2, dest1.c3), concat_ws(',', dest1.c1, dest1.c2, dest1.c3), @@ -36,6 +39,9 @@ concat_ws(NULL, dest1.c1, dest1.c2, dest1.c3), concat_ws('**', dest1.c1, NULL, dest1.c3) FROM dest1 POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest1.c3 SIMPLE null[] +POSTHOOK: Lineage: dest1.c2 SIMPLE null[] +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION concat_ws (. (TOK_TABLE_OR_COL dest1) c1) (. (TOK_TABLE_OR_COL dest1) c2) (. (TOK_TABLE_OR_COL dest1) c3))) (TOK_SELEXPR (TOK_FUNCTION concat_ws ',' (. (TOK_TABLE_OR_COL dest1) c1) (. (TOK_TABLE_OR_COL dest1) c2) (. (TOK_TABLE_OR_COL dest1) c3))) (TOK_SELEXPR (TOK_FUNCTION concat_ws TOK_NULL (. (TOK_TABLE_OR_COL dest1) c1) (. (TOK_TABLE_OR_COL dest1) c2) (. (TOK_TABLE_OR_COL dest1) c3))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '**' (. (TOK_TABLE_OR_COL dest1) c1) TOK_NULL (. 
(TOK_TABLE_OR_COL dest1) c3)))))) @@ -79,12 +85,15 @@ concat_ws('**', dest1.c1, NULL, dest1.c3) FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/mnt/vol/devrs003.snc1/jonchang/trunk/build/ql/tmp/2092269896/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-13-21_295_6879774606875226781/10000 POSTHOOK: query: SELECT concat_ws(dest1.c1, dest1.c2, dest1.c3), concat_ws(',', dest1.c1, dest1.c2, dest1.c3), concat_ws(NULL, dest1.c1, dest1.c2, dest1.c3), concat_ws('**', dest1.c1, NULL, dest1.c3) FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/mnt/vol/devrs003.snc1/jonchang/trunk/build/ql/tmp/2092269896/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-13-21_295_6879774606875226781/10000 +POSTHOOK: Lineage: dest1.c3 SIMPLE null[] +POSTHOOK: Lineage: dest1.c2 SIMPLE null[] +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] xyzabc8675309 abc,xyz,8675309 NULL abc**8675309 Index: ql/src/test/results/clientpositive/bucket_groupby.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket_groupby.q.out (revision 927279) +++ ql/src/test/results/clientpositive/bucket_groupby.q.out (working copy) @@ -15,7 +15,7 @@ value string ds string -Detailed Table Information Table(tableName:clustergroupby, dbName:default, owner:heyongqiang, createTime:1259868796, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/test/data/warehouse/clustergroupby, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null)], parameters:{transient_lastDdlTime=1259868796}) +Detailed Table Information Table(tableName:clustergroupby, dbName:default, owner:athusoo, createTime:1269536337, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/clustergroupby, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null)], parameters:{transient_lastDdlTime=1269536337}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) PREHOOK: query: insert overwrite table clustergroupby partition (ds='100') select key, value from src sort by key PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -24,12 +24,16 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@clustergroupby@ds=100 +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, 
type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: explain select key, count(1) from clustergroupby where ds='100' group by key limit 10 PREHOOK: type: QUERY POSTHOOK: query: explain select key, count(1) from clustergroupby where ds='100' group by key limit 10 POSTHOOK: type: QUERY +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF clustergroupby)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '100')) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_LIMIT 10))) @@ -111,11 +115,13 @@ PREHOOK: query: select key, count(1) from clustergroupby where ds='100' group by key limit 10 PREHOOK: type: QUERY PREHOOK: Input: default@clustergroupby@ds=100 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/1923424614/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-01_132_2849994462411502959/10000 POSTHOOK: query: select key, count(1) from clustergroupby where ds='100' group by key limit 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@clustergroupby@ds=100 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/1923424614/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-01_132_2849994462411502959/10000 +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 3 10 1 100 2 @@ -132,15 +138,19 @@ POSTHOOK: type: null POSTHOOK: Input: default@clustergroupby POSTHOOK: Output: default@clustergroupby +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: describe extended clustergroupby PREHOOK: type: DESCTABLE POSTHOOK: query: describe extended clustergroupby POSTHOOK: type: DESCTABLE +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] key string value string ds string -Detailed Table Information Table(tableName:clustergroupby, dbName:default, owner:heyongqiang, createTime:1259868796, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/test/data/warehouse/clustergroupby, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:1, 
serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[key], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null)], parameters:{last_modified_by=heyongqiang,last_modified_time=1259868817,transient_lastDdlTime=1259868817}) +Detailed Table Information Table(tableName:clustergroupby, dbName:default, owner:athusoo, createTime:1269536337, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/clustergroupby, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[key], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null)], parameters:{last_modified_by=athusoo,last_modified_time=1269536344,transient_lastDdlTime=1269536344}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) PREHOOK: query: insert overwrite table clustergroupby partition (ds='101') select key, value from src distribute by key PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -149,6 +159,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@clustergroupby@ds=101 +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: --normal-- explain select key, count(1) from clustergroupby where ds='101' group by key limit 10 @@ -157,6 +171,10 @@ explain select key, count(1) from clustergroupby where ds='101' group by key limit 10 POSTHOOK: type: QUERY +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF clustergroupby)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '101')) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_LIMIT 10))) @@ -238,11 +256,15 @@ PREHOOK: query: select key, count(1) from clustergroupby where ds='101' group by key limit 10 PREHOOK: type: QUERY PREHOOK: Input: default@clustergroupby@ds=101 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/1926882001/10000 +PREHOOK: Output: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-07_963_1224196948176097438/10000 POSTHOOK: query: select key, count(1) from clustergroupby where ds='101' group by key limit 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@clustergroupby@ds=101 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/1926882001/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-07_963_1224196948176097438/10000 +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 3 10 1 100 2 @@ -261,6 +283,10 @@ explain select length(key), count(1) from clustergroupby where ds='101' group by length(key) limit 10 POSTHOOK: type: QUERY +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF clustergroupby)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION length (TOK_TABLE_OR_COL key))) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '101')) (TOK_GROUPBY (TOK_FUNCTION length (TOK_TABLE_OR_COL key))) (TOK_LIMIT 10))) @@ -342,11 +368,15 @@ PREHOOK: query: select length(key), count(1) from clustergroupby where ds='101' group by length(key) limit 10 PREHOOK: type: QUERY PREHOOK: Input: default@clustergroupby@ds=101 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/695955681/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-11_492_5672307526262847824/10000 POSTHOOK: query: select length(key), count(1) from clustergroupby where ds='101' group by length(key) limit 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@clustergroupby@ds=101 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/695955681/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-11_492_5672307526262847824/10000 +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, 
comment:default), ] 1 10 2 74 3 416 @@ -356,6 +386,10 @@ POSTHOOK: query: explain select abs(length(key)), count(1) from clustergroupby where ds='101' group by abs(length(key)) limit 10 POSTHOOK: type: QUERY +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF clustergroupby)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION abs (TOK_FUNCTION length (TOK_TABLE_OR_COL key)))) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '101')) (TOK_GROUPBY (TOK_FUNCTION abs (TOK_FUNCTION length (TOK_TABLE_OR_COL key)))) (TOK_LIMIT 10))) @@ -437,11 +471,15 @@ PREHOOK: query: select abs(length(key)), count(1) from clustergroupby where ds='101' group by abs(length(key)) limit 10 PREHOOK: type: QUERY PREHOOK: Input: default@clustergroupby@ds=101 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/1888922671/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-15_165_7846894206782884098/10000 POSTHOOK: query: select abs(length(key)), count(1) from clustergroupby where ds='101' group by abs(length(key)) limit 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@clustergroupby@ds=101 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/1888922671/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-15_165_7846894206782884098/10000 +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 1 10 2 74 3 416 @@ -453,6 +491,10 @@ explain select key, count(1) from clustergroupby where ds='101' group by key,3 limit 10 POSTHOOK: type: QUERY +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF clustergroupby)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '101')) (TOK_GROUPBY 
(TOK_TABLE_OR_COL key) 3) (TOK_LIMIT 10))) @@ -542,11 +584,15 @@ PREHOOK: query: select key, count(1) from clustergroupby where ds='101' group by key,3 limit 10 PREHOOK: type: QUERY PREHOOK: Input: default@clustergroupby@ds=101 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/297590608/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-18_795_5069551189448328244/10000 POSTHOOK: query: select key, count(1) from clustergroupby where ds='101' group by key,3 limit 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@clustergroupby@ds=101 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/297590608/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-18_795_5069551189448328244/10000 +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 3 10 1 100 2 @@ -565,6 +611,10 @@ explain select key, count(1) from (select value as key, key as value from clustergroupby where ds='101')subq group by key limit 10 POSTHOOK: type: QUERY +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF clustergroupby)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL value) key) (TOK_SELEXPR (TOK_TABLE_OR_COL key) value)) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '101')))) subq)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_LIMIT 10))) @@ -651,11 +701,15 @@ PREHOOK: query: select key, count(1) from (select value as key, key as value from clustergroupby where ds='101')subq group by key limit 10 PREHOOK: type: QUERY PREHOOK: Input: default@clustergroupby@ds=101 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/1523288366/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-22_436_1694761885463251837/10000 POSTHOOK: query: select key, count(1) from (select value as key, key as value from clustergroupby where ds='101')subq group by key limit 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@clustergroupby@ds=101 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/1523288366/10000 +POSTHOOK: Output: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-22_436_1694761885463251837/10000 +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] val_0 3 val_10 1 val_100 2 @@ -672,6 +726,10 @@ POSTHOOK: query: explain select key, count(1) from clustergroupby group by key POSTHOOK: type: QUERY +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF clustergroupby)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (TOK_TABLE_OR_COL key)))) @@ -745,12 +803,16 @@ PREHOOK: type: QUERY PREHOOK: Input: default@clustergroupby@ds=100 PREHOOK: Input: default@clustergroupby@ds=101 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/1435163727/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-26_043_4925818128223402447/10000 POSTHOOK: query: select key, count(1) from clustergroupby group by key POSTHOOK: type: QUERY POSTHOOK: Input: default@clustergroupby@ds=100 POSTHOOK: Input: default@clustergroupby@ds=101 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/1435163727/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-26_043_4925818128223402447/10000 +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 6 10 2 100 4 @@ -1066,6 +1128,10 @@ POSTHOOK: query: explain select key, count(1) from clustergroupby group by key, 3 POSTHOOK: type: QUERY +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: 
clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF clustergroupby)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (TOK_TABLE_OR_COL key) 3))) @@ -1151,15 +1217,23 @@ POSTHOOK: type: null POSTHOOK: Input: default@clustergroupby POSTHOOK: Output: default@clustergroupby +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: describe extended clustergroupby PREHOOK: type: DESCTABLE POSTHOOK: query: describe extended clustergroupby POSTHOOK: type: DESCTABLE +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] key string value string ds string -Detailed Table Information Table(tableName:clustergroupby, dbName:default, owner:heyongqiang, createTime:1259868796, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/test/data/warehouse/clustergroupby, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[value], sortCols:[Order(col:key, order:1), Order(col:value, order:1)], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null)], parameters:{last_modified_by=heyongqiang,last_modified_time=1259868863,transient_lastDdlTime=1259868863}) +Detailed Table Information Table(tableName:clustergroupby, dbName:default, owner:athusoo, createTime:1269536337, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/clustergroupby, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[value], sortCols:[Order(col:key, order:1), Order(col:value, order:1)], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, 
comment:null)], parameters:{last_modified_by=athusoo,last_modified_time=1269536369,transient_lastDdlTime=1269536369}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) PREHOOK: query: insert overwrite table clustergroupby partition (ds='102') select key, value from src distribute by value sort by key, value PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -1168,12 +1242,24 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@clustergroupby@ds=102 +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: explain select key, count(1) from clustergroupby where ds='102' group by key limit 10 PREHOOK: type: QUERY POSTHOOK: query: explain select key, count(1) from clustergroupby where ds='102' group by key limit 10 POSTHOOK: type: QUERY +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF clustergroupby)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '102')) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_LIMIT 10))) @@ -1255,11 +1341,17 @@ PREHOOK: query: select key, count(1) from clustergroupby where ds='102' group by key limit 10 PREHOOK: type: QUERY PREHOOK: Input: default@clustergroupby@ds=102 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/1143276036/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-33_164_4191253859420819137/10000 POSTHOOK: query: select key, count(1) from clustergroupby where ds='102' group by key limit 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@clustergroupby@ds=102 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/1143276036/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-33_164_4191253859420819137/10000 +POSTHOOK: 
Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 3 10 1 100 2 @@ -1276,6 +1368,12 @@ POSTHOOK: query: explain select value, count(1) from clustergroupby where ds='102' group by value limit 10 POSTHOOK: type: QUERY +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF clustergroupby)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '102')) (TOK_GROUPBY (TOK_TABLE_OR_COL value)) (TOK_LIMIT 10))) @@ -1357,11 +1455,17 @@ PREHOOK: query: select value, count(1) from clustergroupby where ds='102' group by value limit 10 PREHOOK: type: QUERY PREHOOK: Input: default@clustergroupby@ds=102 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/177035338/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-36_819_6730392354555425549/10000 POSTHOOK: query: select value, count(1) from clustergroupby where ds='102' group by value limit 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@clustergroupby@ds=102 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/177035338/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-36_819_6730392354555425549/10000 +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value 
SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] val_0 3 val_10 1 val_100 2 @@ -1378,6 +1482,12 @@ POSTHOOK: query: explain select key, count(1) from clustergroupby where ds='102' group by key, value limit 10 POSTHOOK: type: QUERY +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF clustergroupby)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '102')) (TOK_GROUPBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)) (TOK_LIMIT 10))) @@ -1469,11 +1579,17 @@ PREHOOK: query: select key, count(1) from clustergroupby where ds='102' group by key, value limit 10 PREHOOK: type: QUERY PREHOOK: Input: default@clustergroupby@ds=102 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/971850477/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-40_377_8227883488825963345/10000 POSTHOOK: query: select key, count(1) from clustergroupby where ds='102' group by key, value limit 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@clustergroupby@ds=102 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/971850477/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-40_377_8227883488825963345/10000 +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 3 10 1 100 2 @@ -1490,15 +1606,27 @@ POSTHOOK: type: null POSTHOOK: Input: default@clustergroupby POSTHOOK: Output: default@clustergroupby +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key 
SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: describe extended clustergroupby PREHOOK: type: DESCTABLE POSTHOOK: query: describe extended clustergroupby POSTHOOK: type: DESCTABLE +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] key string value string ds string -Detailed Table Information Table(tableName:clustergroupby, dbName:default, owner:heyongqiang, createTime:1259868796, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/test/data/warehouse/clustergroupby, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[value, key], sortCols:[Order(col:key, order:1)], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null)], parameters:{last_modified_by=heyongqiang,last_modified_time=1259868885,transient_lastDdlTime=1259868885}) +Detailed Table Information Table(tableName:clustergroupby, dbName:default, owner:athusoo, createTime:1269536337, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/clustergroupby, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[value, key], sortCols:[Order(col:key, order:1)], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null)], parameters:{last_modified_by=athusoo,last_modified_time=1269536383,transient_lastDdlTime=1269536383}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) PREHOOK: 
query: insert overwrite table clustergroupby partition (ds='103') select key, value from src distribute by value, key sort by key PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -1507,12 +1635,28 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@clustergroupby@ds=103 +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=103).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=103).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: explain select key, count(1) from clustergroupby where ds='103' group by key limit 10 PREHOOK: type: QUERY POSTHOOK: query: explain select key, count(1) from clustergroupby where ds='103' group by key limit 10 POSTHOOK: type: QUERY +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=103).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: clustergroupby PARTITION(ds=103).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF clustergroupby)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '103')) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_LIMIT 10))) @@ -1594,11 +1738,19 @@ PREHOOK: query: select key, count(1) from clustergroupby where ds='103' group by key limit 10 PREHOOK: type: QUERY PREHOOK: Input: default@clustergroupby@ds=103 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/116849772/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-47_244_5541000741369242327/10000 POSTHOOK: query: select key, count(1) from clustergroupby where ds='103' group by key limit 10 
POSTHOOK: type: QUERY
POSTHOOK: Input: default@clustergroupby@ds=103
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/116849772/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-47_244_5541000741369242327/10000
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=103).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=103).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 3
10 1
100 2
@@ -1615,6 +1767,14 @@
POSTHOOK: query: explain select key, count(1) from clustergroupby where ds='103' group by value, key limit 10
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=103).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=103).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF clustergroupby)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '103')) (TOK_GROUPBY (TOK_TABLE_OR_COL value) (TOK_TABLE_OR_COL key)) (TOK_LIMIT 10)))
@@ -1706,11 +1866,19 @@
PREHOOK: query: select key, count(1) from clustergroupby where ds='103' group by value, key limit 10
PREHOOK: type: QUERY
PREHOOK: Input: default@clustergroupby@ds=103
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/881916878/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-50_869_5933623178511957789/10000
POSTHOOK: query: select key, count(1) from clustergroupby where ds='103' group by value, key limit 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@clustergroupby@ds=103
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/tmp/881916878/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-50_869_5933623178511957789/10000
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=103).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=103).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 3
10 1
100 2
@@ -1726,3 +1894,11 @@
POSTHOOK: query: drop table clustergroupby
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@clustergroupby
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=102).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=103).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: clustergroupby PARTITION(ds=103).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/groupby11.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby11.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby11.q.out (working copy)
@@ -99,7 +99,7 @@
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1187956750/10004
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-51_865_7702682920908736079/10004
Reduce Output Operator
key expressions:
expr: _col0
@@ -167,7 +167,7 @@
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1187956750/10005
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-51_865_7702682920908736079/10005
Reduce Output Operator
key expressions:
expr: _col0
@@ -251,14 +251,26 @@
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1@ds=111
POSTHOOK: Output: default@dest2@ds=111
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).val2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).val1 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).key SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).val2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).val1 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).key SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT * from dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1@ds=111
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1884622458/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-01_751_7027488922541157903/10000
POSTHOOK: query: SELECT * from dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1@ds=111
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1884622458/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-01_751_7027488922541157903/10000
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).val2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).val1 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).key SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).val2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).val1 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).key SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
val_0 3 1 111
val_10 1 1 111
val_100 2 1 111
@@ -571,11 +583,17 @@
PREHOOK: query: SELECT * from dest2
PREHOOK: type: QUERY
PREHOOK: Input: default@dest2@ds=111
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1919394559/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-01_841_6667941889141989413/10000
POSTHOOK: query: SELECT * from dest2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest2@ds=111
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1919394559/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-01_841_6667941889141989413/10000
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).val2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).val1 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).key SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).val2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).val1 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).key SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
0 3 1 111
10 1 1 111
100 2 1 111
@@ -890,8 +908,20 @@
POSTHOOK: query: drop table dest1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).val2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).val1 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).key SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).val2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).val1 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).key SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: drop table dest2
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table dest2
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).val2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).val1 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).key SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).val2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).val1 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).key SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/mapreduce7.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapreduce7.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/mapreduce7.q.out (working copy)
@@ -125,14 +125,26 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.one SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.ten SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.v SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.k SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/933001468/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-32_892_1723195133091280153/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/933001468/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-32_892_1723195133091280153/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.one SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.ten SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.v SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.k SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
0 val_0 0 0 0 val_0
0 val_0 0 0 0 val_0
0 val_0 0 0 0 val_0
Index: ql/src/test/results/clientpositive/create_udaf.q.out
===================================================================
--- ql/src/test/results/clientpositive/create_udaf.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/create_udaf.q.out (working copy)
@@ -31,16 +31,19 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.col UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1967995374/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-21_624_2580244781901705661/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1967995374/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-10-21_624_2580244781901705661/10000
+POSTHOOK: Lineage: dest1.col UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
7
PREHOOK: query: DROP TEMPORARY FUNCTION test_max
PREHOOK: type: DROPFUNCTION
POSTHOOK: query: DROP TEMPORARY FUNCTION test_max
POSTHOOK: type: DROPFUNCTION
+POSTHOOK: Lineage: dest1.col UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/groupby2_map.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby2_map.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby2_map.q.out (working copy)
@@ -119,14 +119,20 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/615915015/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-57_247_7543714331003029438/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/615915015/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-57_247_7543714331003029438/10000
+POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 1 00.0
1 71 116414.0
2 69 225571.0
Index: ql/src/test/results/clientpositive/join_map_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/join_map_ppr.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/join_map_ppr.q.out (working copy)
@@ -84,7 +84,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-19_535_6239891765958059579/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-52_473_765100905816182199/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -95,12 +95,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451579
+ transient_lastDdlTime 1269538552
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
@@ -163,7 +163,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-19_535_6239891765958059579/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-52_473_765100905816182199/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -174,12 +174,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451579
+ transient_lastDdlTime 1269538552
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
@@ -232,7 +232,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-19_535_6239891765958059579/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-52_473_765100905816182199/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -243,21 +243,21 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451579
+ transient_lastDdlTime 1269538552
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
MultiFileSpray: false
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -271,13 +271,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451577
+ transient_lastDdlTime 1269538550
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -288,13 +288,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451577
+ transient_lastDdlTime 1269538550
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
name: srcpart
@@ -306,14 +306,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-19_535_6239891765958059579/10002
- destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-19_535_6239891765958059579/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-52_473_765100905816182199/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-52_473_765100905816182199/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-19_535_6239891765958059579/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-52_473_765100905816182199/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -323,20 +323,20 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451579
+ transient_lastDdlTime 1269538552
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-19_535_6239891765958059579/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-52_473_765100905816182199/10001
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-19_535_6239891765958059579/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-52_473_765100905816182199/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -352,9 +352,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-19_535_6239891765958059579/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-19_535_6239891765958059579/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-52_473_765100905816182199/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-52_473_765100905816182199/10002]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-19_535_6239891765958059579/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-52_473_765100905816182199/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -365,12 +365,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451579
+ transient_lastDdlTime 1269538552
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -381,12 +381,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451579
+ transient_lastDdlTime 1269538552
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
name: dest_j1
@@ -395,7 +395,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-19_535_6239891765958059579/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-52_473_765100905816182199/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -406,12 +406,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451579
+ transient_lastDdlTime 1269538552
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
@@ -437,14 +437,20 @@
POSTHOOK: Input: default@src
POSTHOOK: Input: default@src1
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: select * from dest_j1 x order by x.key
PREHOOK: type: QUERY
PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-24_658_2362100966618189146/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-56_786_1687619468507392976/10000
POSTHOOK: query: select * from dest_j1 x order by x.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-24_658_2362100966618189146/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-56_786_1687619468507392976/10000
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
128 val_128 val_128
128 val_128 val_128
128 val_128 val_128
@@ -557,11 +563,17 @@
POSTHOOK: query: CREATE TABLE src_copy(key int, value string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@src_copy
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: CREATE TABLE src1_copy(key string, value string)
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE TABLE src1_copy(key string, value string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@src1_copy
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: INSERT OVERWRITE TABLE src_copy select key, value from src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
@@ -570,6 +582,11 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@src_copy
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: INSERT OVERWRITE TABLE src1_copy select key, value from src1
PREHOOK: type: QUERY
PREHOOK: Input: default@src1
@@ -578,6 +595,13 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src1
POSTHOOK: Output: default@src1_copy
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src1_copy.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src1_copy.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: EXPLAIN EXTENDED INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value
@@ -592,6 +616,13 @@
JOIN srcpart z ON (x.key = z.key)
WHERE z.ds='2008-04-08' and z.hr=11
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src1_copy.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src1_copy.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_TABREF src1_copy x) (TOK_TABREF src_copy y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key))) (TOK_TABREF srcpart z) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL z) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest_j1)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST x y))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL z) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL y) value))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL z) ds) '2008-04-08') (= (. (TOK_TABLE_OR_COL z) hr) 11)))))
@@ -659,7 +690,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-35_846_5199312803670707835/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-05_839_7516735476030842325/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -670,12 +701,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451579
+ transient_lastDdlTime 1269538552
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
@@ -738,7 +769,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-35_846_5199312803670707835/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-05_839_7516735476030842325/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -749,12 +780,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451579
+ transient_lastDdlTime 1269538552
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
@@ -807,7 +838,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-35_846_5199312803670707835/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-05_839_7516735476030842325/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -818,21 +849,21 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451579
+ transient_lastDdlTime 1269538552
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
MultiFileSpray: false
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -846,13 +877,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451577
+ transient_lastDdlTime 1269538550
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -863,13 +894,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451577
+ transient_lastDdlTime 1269538550
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
name: srcpart
@@ -881,14 +912,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-35_846_5199312803670707835/10002
- destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-35_846_5199312803670707835/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-05_839_7516735476030842325/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-05_839_7516735476030842325/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-35_846_5199312803670707835/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-05_839_7516735476030842325/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -898,20 +929,20 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451579
+ transient_lastDdlTime 1269538552
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-35_846_5199312803670707835/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-05_839_7516735476030842325/10001
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-35_846_5199312803670707835/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-05_839_7516735476030842325/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -927,9 +958,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-35_846_5199312803670707835/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-35_846_5199312803670707835/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-05_839_7516735476030842325/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-05_839_7516735476030842325/10002]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-35_846_5199312803670707835/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-05_839_7516735476030842325/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -940,12 +971,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451579
+ transient_lastDdlTime 1269538552
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -956,12 +987,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451579
+ transient_lastDdlTime 1269538552
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
name: dest_j1
@@ -970,7 +1001,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-35_846_5199312803670707835/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-05_839_7516735476030842325/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -981,12 +1012,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1
name dest_j1
serialization.ddl struct dest_j1 { string key, string value, string val2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451579
+ transient_lastDdlTime 1269538552
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest_j1
TotalFiles: 1
@@ -1012,14 +1043,34 @@
POSTHOOK: Input: default@src_copy
POSTHOOK: Input: default@src1_copy
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src_copy)y.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1_copy)x.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src1_copy.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src1_copy.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: select * from dest_j1 x order by x.key
PREHOOK: type: QUERY
PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-41_113_2263210092190449095/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-10_147_3654116423691137306/10000
POSTHOOK: query: select * from dest_j1 x order by x.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-41_113_2263210092190449095/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-36-10_147_3654116423691137306/10000
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src_copy)y.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1_copy)x.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src1_copy.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src1_copy.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
128 val_128 val_128
128 val_128 val_128
128 val_128 val_128
@@ -1132,13 +1183,43 @@
POSTHOOK: query: drop table src_copy
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@src_copy
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src_copy)y.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1_copy)x.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src1_copy.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src1_copy.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: drop table src1_copy
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table src1_copy
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@src1_copy
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src_copy)y.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1_copy)x.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src1_copy.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src1_copy.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: drop table dest_j1
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table dest_j1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src_copy)y.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1_copy)x.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src1_copy.value SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src1_copy.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_copy.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/ppd_multi_insert.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_multi_insert.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/ppd_multi_insert.q.out (working copy)
@@ -243,14 +243,24 @@
POSTHOOK: Output: default@mi2
POSTHOOK: Output: default@mi3@ds=2008-04-08/hr=12
POSTHOOK: Output: ../build/ql/test/data/warehouse/mi4.out
+POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: mi1.value SIMPLE null[(src)a.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: mi1.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: mi2.value SIMPLE null[(src)a.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: mi2.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT mi1.* FROM mi1
PREHOOK: type: QUERY
PREHOOK: Input: default@mi1
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-34-41_902_6905591894666460215/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-47-31_666_8338554073120288735/10000
POSTHOOK: query: SELECT mi1.* FROM mi1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@mi1
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-34-41_902_6905591894666460215/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-47-31_666_8338554073120288735/10000
+POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: mi1.value SIMPLE null[(src)a.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: mi1.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: mi2.value SIMPLE null[(src)a.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: mi2.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
0 val_0
0 val_0
0 val_0
@@ -402,11 +412,16 @@
PREHOOK: query: SELECT mi2.* FROM mi2
PREHOOK: type: QUERY
PREHOOK: Input: default@mi2
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-34-41_936_9202557721170644347/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-47-31_712_6294933537030948674/10000
POSTHOOK: query: SELECT mi2.* FROM mi2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@mi2
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-34-41_936_9202557721170644347/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-47-31_712_6294933537030948674/10000
+POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: mi1.value SIMPLE null[(src)a.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: mi1.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: mi2.value SIMPLE null[(src)a.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: mi2.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
100 val_100
100 val_100
100 val_100
@@ -621,11 +636,16 @@
PREHOOK: query: SELECT mi3.* FROM mi3
PREHOOK: type: QUERY
PREHOOK: Input: default@mi3@ds=2008-04-08/hr=12
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-34-41_968_153693361750093127/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-47-31_757_8552108181803173224/10000
POSTHOOK: query: SELECT mi3.* FROM mi3
POSTHOOK: type: QUERY
POSTHOOK: Input: default@mi3@ds=2008-04-08/hr=12
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-34-41_968_153693361750093127/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-47-31_757_8552108181803173224/10000
+POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: mi1.value SIMPLE null[(src)a.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: mi1.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: mi2.value SIMPLE null[(src)a.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: mi2.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
200 2008-04-08 12
200 2008-04-08 12
200 2008-04-08 12
@@ -1300,13 +1320,28 @@
POSTHOOK: query: DROP TABLE mi1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@mi1
+POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: mi1.value SIMPLE null[(src)a.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: mi1.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: mi2.value SIMPLE null[(src)a.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: mi2.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP TABLE mi2
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE mi2
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@mi2
+POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: mi1.value SIMPLE null[(src)a.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: mi1.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: mi2.value SIMPLE null[(src)a.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: mi2.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: DROP TABLE mi3
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE mi3
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@mi3
+POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: mi1.value SIMPLE null[(src)a.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: mi1.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: mi2.value SIMPLE null[(src)a.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: mi2.key SIMPLE null[(src)a.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/join9.q.out
===================================================================
--- ql/src/test/results/clientpositive/join9.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/join9.q.out (working copy)
@@ -63,10 +63,10 @@
type: string
Needs Tagging: true
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src [src2]
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [src1]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src [src2]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [src1]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
Partition
base file name: src
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -77,12 +77,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
name src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451556
+ transient_lastDdlTime 1269538535
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -93,16 +93,16 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
name src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451556
+ transient_lastDdlTime 1269538535
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: src
name: src
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -116,13 +116,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451554
+ transient_lastDdlTime 1269538534
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -133,13 +133,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266451554
+ transient_lastDdlTime 1269538534
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
name: srcpart
@@ -174,7 +174,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-05-57_597_1945882948643511938/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-36_389_4151545450252380790/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -185,12 +185,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location
file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451557 + transient_lastDdlTime 1269538536 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 TotalFiles: 1 @@ -200,7 +200,7 @@ Move Operator tables: replace: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-05-57_597_1945882948643511938/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-36_389_4151545450252380790/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -210,15 +210,15 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451557 + transient_lastDdlTime 1269538536 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-05-57_597_1945882948643511938/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-36_389_4151545450252380790/10001 PREHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) @@ -233,14 +233,18 @@ POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)src1.FieldSchema(name:ds, type:string, comment:null), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-03_309_3672202177246728511/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-40_251_7871595662910397244/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-06-03_309_3672202177246728511/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-40_251_7871595662910397244/10000 +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)src1.FieldSchema(name:ds, type:string, comment:null), ] 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/rand_partitionpruner2.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (revision 927279) +++ 
ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (working copy) @@ -53,7 +53,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-22-34_267_6708092954551902162/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-48-53_898_1090464837168880842/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -64,22 +64,22 @@ columns.types string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/tmptable + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/tmptable name tmptable serialization.ddl struct tmptable { string key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452554 + transient_lastDdlTime 1269539333 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a] - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -93,13 +93,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452551 + transient_lastDdlTime 1269539332 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -110,17 +110,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452551 + transient_lastDdlTime 1269539332 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - 
file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -134,13 +134,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452551 + transient_lastDdlTime 1269539332 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -151,13 +151,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452551 + transient_lastDdlTime 1269539332 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -169,14 +169,14 @@ Move Operator files: hdfs directory: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-22-34_267_6708092954551902162/10002 - destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-22-34_267_6708092954551902162/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-48-53_898_1090464837168880842/10002 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-48-53_898_1090464837168880842/10000 Stage: Stage-0 Move Operator tables: replace: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-22-34_267_6708092954551902162/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-48-53_898_1090464837168880842/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -186,20 +186,20 @@ columns.types string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/tmptable + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/tmptable name tmptable serialization.ddl struct tmptable { string key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452554 + transient_lastDdlTime 1269539333 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable - tmp directory: 
file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-22-34_267_6708092954551902162/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-48-53_898_1090464837168880842/10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-22-34_267_6708092954551902162/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-48-53_898_1090464837168880842/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -217,9 +217,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-22-34_267_6708092954551902162/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-22-34_267_6708092954551902162/10002] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-48-53_898_1090464837168880842/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-48-53_898_1090464837168880842/10002] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-22-34_267_6708092954551902162/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-48-53_898_1090464837168880842/10002 Partition base file name: 10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -230,12 +230,12 @@ columns.types string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/tmptable + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/tmptable name tmptable serialization.ddl struct tmptable { string key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452554 + transient_lastDdlTime 1269539333 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -246,12 +246,12 @@ columns.types string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/tmptable + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/tmptable name tmptable serialization.ddl struct tmptable { string key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452554 + transient_lastDdlTime 1269539333 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable name: tmptable @@ -260,7 +260,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-22-34_267_6708092954551902162/10000 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-48-53_898_1090464837168880842/10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -271,12 +271,12 @@ columns.types string:string:string:string file.inputformat 
org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/tmptable + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/tmptable name tmptable serialization.ddl struct tmptable { string key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452554 + transient_lastDdlTime 1269539333 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable TotalFiles: 1 @@ -295,14 +295,22 @@ POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 POSTHOOK: Output: default@tmptable +POSTHOOK: Lineage: tmptable.ds SIMPLE null[(srcpart)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: tmptable.hr SIMPLE null[(srcpart)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmptable.value SIMPLE null[(srcpart)a.FieldSchema(name:hr, type:string, comment:null), ] +POSTHOOK: Lineage: tmptable.key SIMPLE null[(srcpart)a.FieldSchema(name:ds, type:string, comment:null), ] PREHOOK: query: select * from tmptable x sort by x.key,x.value,x.ds,x.hr PREHOOK: type: QUERY PREHOOK: Input: default@tmptable -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-22-42_142_4687900567216853460/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-49-00_195_1035440357290145125/10000 POSTHOOK: query: select * from tmptable x sort by x.key,x.value,x.ds,x.hr POSTHOOK: type: QUERY POSTHOOK: Input: default@tmptable -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-22-42_142_4687900567216853460/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-49-00_195_1035440357290145125/10000 +POSTHOOK: Lineage: tmptable.ds SIMPLE null[(srcpart)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: tmptable.hr SIMPLE null[(srcpart)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmptable.value SIMPLE null[(srcpart)a.FieldSchema(name:hr, type:string, comment:null), ] +POSTHOOK: Lineage: tmptable.key SIMPLE null[(srcpart)a.FieldSchema(name:ds, type:string, comment:null), ] 103 val_103 2008-04-08 11 103 val_103 2008-04-08 12 133 val_133 2008-04-08 11 @@ -420,3 +428,7 @@ POSTHOOK: query: drop table tmptable POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@tmptable +POSTHOOK: Lineage: tmptable.ds SIMPLE null[(srcpart)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: tmptable.hr SIMPLE null[(srcpart)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmptable.value SIMPLE null[(srcpart)a.FieldSchema(name:hr, type:string, comment:null), ] +POSTHOOK: Lineage: tmptable.key SIMPLE null[(srcpart)a.FieldSchema(name:ds, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/groupby4_noskew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby4_noskew.q.out (revision 927279) +++ ql/src/test/results/clientpositive/groupby4_noskew.q.out (working copy) @@ -82,14 +82,16 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: 
Lineage: dest1.c1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/2133291489/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-09_966_5171363706565291479/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/2133291489/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-09_966_5171363706565291479/10000 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 1 2 Index: ql/src/test/results/clientpositive/input12.q.out =================================================================== --- ql/src/test/results/clientpositive/input12.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input12.q.out (working copy) @@ -132,7 +132,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/790485692/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-15_215_1492282431419821497/10000 Stage: Stage-0 Move Operator @@ -147,7 +147,7 @@ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1323749070/10006 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-15_215_1492282431419821497/10006 Reduce Output Operator sort order: Map-reduce partition columns: @@ -177,7 +177,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/790485692/10002 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-15_215_1492282431419821497/10002 Stage: Stage-1 Move Operator @@ -192,7 +192,7 @@ Stage: Stage-7 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1323749070/10007 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-15_215_1492282431419821497/10007 Reduce Output Operator sort order: Map-reduce partition columns: @@ -222,7 +222,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/790485692/10004 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-15_215_1492282431419821497/10004 Stage: Stage-2 Move Operator @@ -240,7 +240,7 @@ Stage: Stage-10 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1323749070/10008 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-15_215_1492282431419821497/10008 Reduce Output Operator sort order: Map-reduce partition columns: @@ -280,14 +280,24 @@ POSTHOOK: Output: default@dest1 POSTHOOK: Output: default@dest2 POSTHOOK: Output: default@dest3@ds=2008-04-08/hr=12 +POSTHOOK: Lineage: dest2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, 
comment:default), ] +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1898663896/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-19_612_3361422954995829620/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1898663896/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-19_612_3361422954995829620/10000 +POSTHOOK: Lineage: dest2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 86 val_86 27 val_27 98 val_98 @@ -375,11 +385,16 @@ PREHOOK: query: SELECT dest2.* FROM dest2 PREHOOK: type: QUERY PREHOOK: Input: default@dest2 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1913424809/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-19_656_3886098371603531571/10000 POSTHOOK: query: SELECT dest2.* FROM dest2 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest2 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1913424809/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-19_656_3886098371603531571/10000 +POSTHOOK: Lineage: dest2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 165 val_165 193 val_193 150 val_150 @@ -488,11 +503,16 @@ PREHOOK: query: SELECT dest3.* FROM dest3 PREHOOK: type: QUERY PREHOOK: Input: default@dest3@ds=2008-04-08/hr=12 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/454418887/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-19_698_8681598060559747401/10000 POSTHOOK: query: SELECT dest3.* FROM dest3 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest3@ds=2008-04-08/hr=12 -POSTHOOK: Output: 
file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/454418887/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-19_698_8681598060559747401/10000 +POSTHOOK: Lineage: dest2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 238 2008-04-08 12 311 2008-04-08 12 409 2008-04-08 12 Index: ql/src/test/results/clientpositive/join30.q.out =================================================================== --- ql/src/test/results/clientpositive/join30.q.out (revision 927279) +++ ql/src/test/results/clientpositive/join30.q.out (working copy) @@ -80,7 +80,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1478873095/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-06_657_7001884250285086971/10002 Select Operator expressions: expr: _col0 @@ -168,14 +168,18 @@ POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 POSTHOOK: Output: default@dest_j1 +POSTHOOK: Lineage: dest_j1.cnt UDAF null[(src1)x.null, (src)y.null, ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/292750136/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-13_119_7416276232953649932/10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/292750136/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-13_119_7416276232953649932/10000 +POSTHOOK: Lineage: dest_j1.cnt UDAF null[(src1)x.null, (src)y.null, ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] 66 1 98 2 128 3 @@ -196,3 +200,5 @@ POSTHOOK: query: drop TABLE dest_j1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest_j1 +POSTHOOK: Lineage: dest_j1.cnt UDAF null[(src1)x.null, (src)y.null, ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/input35.q.out =================================================================== --- ql/src/test/results/clientpositive/input35.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input35.q.out (working copy) @@ -80,7 +80,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/594124365/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-08_767_3147200997026325298/10000 Stage: Stage-0 Move Operator @@ -95,7 +95,7 @@ 
Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1351890816/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-08_767_3147200997026325298/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -139,14 +139,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/578807843/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-12_185_7088494208008800101/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/578807843/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-12_185_7088494208008800101/10000 +POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] 238 val_238 86 val_86 311 val_311 Index: ql/src/test/results/clientpositive/rcfile_bigdata.q.out =================================================================== --- ql/src/test/results/clientpositive/rcfile_bigdata.q.out (revision 927279) +++ ql/src/test/results/clientpositive/rcfile_bigdata.q.out (working copy) @@ -27,23 +27,31 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@columntable_bigdata +POSTHOOK: Lineage: columntable_bigdata.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: columntable_bigdata.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: describe columnTable_Bigdata PREHOOK: type: DESCTABLE POSTHOOK: query: describe columnTable_Bigdata POSTHOOK: type: DESCTABLE +POSTHOOK: Lineage: columntable_bigdata.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: columntable_bigdata.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] key string from deserializer value string from deserializer PREHOOK: query: select count(columnTable_Bigdata.key) from columnTable_Bigdata PREHOOK: type: QUERY PREHOOK: Input: default@columntable_bigdata -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/438172653/10000 +PREHOOK: Output: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-49-24_022_2257363832888015731/10000 POSTHOOK: query: select count(columnTable_Bigdata.key) from columnTable_Bigdata POSTHOOK: type: QUERY POSTHOOK: Input: default@columntable_bigdata -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/438172653/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-49-24_022_2257363832888015731/10000 +POSTHOOK: Lineage: columntable_bigdata.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: columntable_bigdata.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] 5005500 PREHOOK: query: DROP TABLE columnTable_Bigdata PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE columnTable_Bigdata POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@columntable_bigdata +POSTHOOK: Lineage: columntable_bigdata.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: columntable_bigdata.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/input3_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input3_limit.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input3_limit.q.out (working copy) @@ -90,7 +90,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/njain/hive5/hive5/build/ql/tmp/1571190278/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-51_427_3879003422136632892/10002 Reduce Output Operator sort order: tag: -1 @@ -130,14 +130,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Output: default@t2 +POSTHOOK: Lineage: t2.value SIMPLE null[(t1)t1.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: t2.key SIMPLE null[(t1)t1.FieldSchema(name:key, type:string, comment:null), ] PREHOOK: query: SELECT * FROM T2 SORT BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@t2 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2048970411/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-57_501_4507045504602455678/10000 POSTHOOK: query: SELECT * FROM T2 SORT BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2048970411/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-57_501_4507045504602455678/10000 +POSTHOOK: Lineage: t2.value SIMPLE null[(t1)t1.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: t2.key SIMPLE null[(t1)t1.FieldSchema(name:key, type:string, comment:null), ] 0 val_0 0 val_0 0 val_0 @@ -163,8 +167,12 @@ POSTHOOK: query: DROP TABLE T1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@t1 +POSTHOOK: Lineage: t2.value SIMPLE null[(t1)t1.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: t2.key SIMPLE null[(t1)t1.FieldSchema(name:key, type:string, comment:null), ] 
PREHOOK: query: DROP TABLE T2 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE T2 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@t2 +POSTHOOK: Lineage: t2.value SIMPLE null[(t1)t1.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: t2.key SIMPLE null[(t1)t1.FieldSchema(name:key, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/mapreduce2.q.out =================================================================== --- ql/src/test/results/clientpositive/mapreduce2.q.out (revision 927279) +++ ql/src/test/results/clientpositive/mapreduce2.q.out (working copy) @@ -113,14 +113,22 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.one SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.ten SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: SELECT * FROM (SELECT dest1.* FROM dest1 DISTRIBUTE BY key SORT BY key, ten, one, value) T PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/1481559140/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-00_408_1306535404459074171/10000 POSTHOOK: query: SELECT * FROM (SELECT dest1.* FROM dest1 DISTRIBUTE BY key SORT BY key, ten, one, value) T POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/1481559140/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-00_408_1306535404459074171/10000 +POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.one SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.ten SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] 0 0 0 val_0 0 0 0 val_0 0 0 0 val_0 Index: ql/src/test/results/clientpositive/create_genericudf.q.out =================================================================== --- ql/src/test/results/clientpositive/create_genericudf.q.out (revision 927279) +++ ql/src/test/results/clientpositive/create_genericudf.q.out (working copy) @@ -49,16 +49,37 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c7 SIMPLE null[] +POSTHOOK: Lineage: dest1.c6 SIMPLE null[] +POSTHOOK: Lineage: dest1.c5 SIMPLE null[] +POSTHOOK: Lineage: dest1.c4 SIMPLE null[] +POSTHOOK: Lineage: dest1.c3 SIMPLE null[] +POSTHOOK: 
Lineage: dest1.c2 SIMPLE null[] +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] PREHOOK: query: SELECT dest1.* FROM dest1 LIMIT 1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/161564459/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-46_605_7357749796166445833/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 LIMIT 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/161564459/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-46_605_7357749796166445833/10000 +POSTHOOK: Lineage: dest1.c7 SIMPLE null[] +POSTHOOK: Lineage: dest1.c6 SIMPLE null[] +POSTHOOK: Lineage: dest1.c5 SIMPLE null[] +POSTHOOK: Lineage: dest1.c4 SIMPLE null[] +POSTHOOK: Lineage: dest1.c3 SIMPLE null[] +POSTHOOK: Lineage: dest1.c2 SIMPLE null[] +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] bbc bcc NULL NULL NULL bc abc PREHOOK: query: DROP TEMPORARY FUNCTION test_translate PREHOOK: type: DROPFUNCTION POSTHOOK: query: DROP TEMPORARY FUNCTION test_translate POSTHOOK: type: DROPFUNCTION +POSTHOOK: Lineage: dest1.c7 SIMPLE null[] +POSTHOOK: Lineage: dest1.c6 SIMPLE null[] +POSTHOOK: Lineage: dest1.c5 SIMPLE null[] +POSTHOOK: Lineage: dest1.c4 SIMPLE null[] +POSTHOOK: Lineage: dest1.c3 SIMPLE null[] +POSTHOOK: Lineage: dest1.c2 SIMPLE null[] +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] Index: ql/src/test/results/clientpositive/bucket3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket3.q.out (revision 927279) +++ ql/src/test/results/clientpositive/bucket3.q.out (working copy) @@ -49,9 +49,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src [src] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src [src] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -62,12 +62,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1267133766 + transient_lastDdlTime 1269536315 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -78,12 +78,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
transient_lastDdlTime 1267133766 + transient_lastDdlTime 1269536315 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src @@ -99,7 +99,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-36-07_876_1324315962912856358/10000 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-35_814_2073666816718382713/10000 NumFilesPerFileSink: 2 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -111,13 +111,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucket3_1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucket3_1 name bucket3_1 partition_columns ds serialization.ddl struct bucket3_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1267133767 + transient_lastDdlTime 1269536315 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucket3_1 TotalFiles: 2 @@ -129,7 +129,7 @@ partition: ds 1 replace: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-36-07_876_1324315962912856358/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-35_814_2073666816718382713/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -140,16 +140,16 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucket3_1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucket3_1 name bucket3_1 partition_columns ds serialization.ddl struct bucket3_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1267133767 + transient_lastDdlTime 1269536315 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucket3_1 - tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-36-07_876_1324315962912856358/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-35_814_2073666816718382713/10001 PREHOOK: query: insert overwrite table bucket3_1 partition (ds='1') @@ -162,6 +162,8 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@bucket3_1@ds=1 +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: insert overwrite table bucket3_1 partition (ds='2') select * from src PREHOOK: type: QUERY @@ -172,12 +174,20 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@bucket3_1@ds=2 +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: bucket3_1 
PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: explain select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1' order by key PREHOOK: type: QUERY POSTHOOK: query: explain select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1' order by key POSTHOOK: type: QUERY +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF bucket3_1 (TOK_TABLESAMPLE 1 2) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '1')) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key))))) @@ -243,11 +253,15 @@ PREHOOK: query: select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1' order by key PREHOOK: type: QUERY PREHOOK: Input: default@bucket3_1@ds=1 -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-36-16_445_4212015408358894345/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-42_684_2700058469423017551/10000 POSTHOOK: query: select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1' order by key POSTHOOK: type: QUERY POSTHOOK: Input: default@bucket3_1@ds=1 -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-36-16_445_4212015408358894345/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-42_684_2700058469423017551/10000 +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 val_0 1 0 val_0 1 0 val_0 1 @@ -500,3 +514,7 @@ POSTHOOK: query: drop table bucket3_1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@bucket3_1 +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: 
ql/src/test/results/clientpositive/input_lazyserde.q.out =================================================================== --- ql/src/test/results/clientpositive/input_lazyserde.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input_lazyserde.q.out (working copy) @@ -99,14 +99,24 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src_thrift POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map<string,string>, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array<string>, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), ] PREHOOK: query: SELECT dest1.* FROM dest1 DISTRIBUTE BY 1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/4821465/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-57_325_9156769792767732995/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 DISTRIBUTE BY 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/4821465/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-57_325_9156769792767732995/10000 +POSTHOOK: Lineage: dest1.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map<string,string>, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array<string>, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), ] [0,0,0] ["0","0","0"] {"key_0":"value_0"} 1712634731 record_0 [1,2,3] ["10","100","1000"] {"key_1":"value_1"} 465985200 record_1 [2,4,6] ["20","200","2000"] {"key_2":"value_2"} -751827638 record_2 @@ -121,11 +131,16 @@ PREHOOK: query: SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1 DISTRIBUTE BY 1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/670788177/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-00_380_5496625792509914719/10000 POSTHOOK: query: SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1 DISTRIBUTE BY 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/670788177/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-00_380_5496625792509914719/10000 +POSTHOOK: Lineage: dest1.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE
null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] 0 0 NULL 1712634731 record_0 1 10 NULL 465985200 record_1 2 20 NULL -751827638 record_2 @@ -142,11 +157,21 @@ POSTHOOK: query: DROP TABLE dest1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] PREHOOK: query: CREATE TABLE dest1(a array) ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' ESCAPED BY '\\' PREHOOK: type: CREATETABLE POSTHOOK: query: CREATE TABLE dest1(a array) ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' ESCAPED BY '\\' POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint FROM src_thrift DISTRIBUTE BY 1 PREHOOK: type: QUERY PREHOOK: Input: default@src_thrift @@ -155,14 +180,26 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src_thrift POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] PREHOOK: query: SELECT * from dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1650408145/10000 +PREHOOK: Output: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-06_575_907349352373359882/10000 POSTHOOK: query: SELECT * from dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1650408145/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-06_575_907349352373359882/10000 +POSTHOOK: Lineage: dest1.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] [0,0,0] [1,2,3] [2,4,6] @@ -179,11 +216,23 @@ POSTHOOK: query: DROP TABLE dest1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] PREHOOK: query: CREATE TABLE dest1(a map) ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' ESCAPED BY '\\' PREHOOK: type: CREATETABLE POSTHOOK: query: CREATE TABLE dest1(a map) ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' ESCAPED BY '\\' POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT src_thrift.mstringstring FROM src_thrift DISTRIBUTE BY 1 PREHOOK: type: QUERY PREHOOK: Input: default@src_thrift @@ -192,14 +241,28 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src_thrift POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.e SIMPLE 
null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] PREHOOK: query: SELECT * from dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/944903832/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-09_702_8713172366907392025/10000 POSTHOOK: query: SELECT * from dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/944903832/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-09_702_8713172366907392025/10000 +POSTHOOK: Lineage: dest1.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] {"key_0":"value_0"} {"key_1":"value_1"} {"key_2":"value_2"} @@ -216,3 +279,10 @@ POSTHOOK: query: DROP TABLE dest1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] Index: 
ql/src/test/results/clientpositive/join4.q.out =================================================================== --- ql/src/test/results/clientpositive/join4.q.out (revision 927279) +++ ql/src/test/results/clientpositive/join4.q.out (working copy) @@ -202,14 +202,22 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c4 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c3 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c2 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1834732501/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-34-21_332_2539449899795857933/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1834732501/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-34-21_332_2539449899795857933/10000 +POSTHOOK: Lineage: dest1.c4 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c3 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c2 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ] 11 val_11 NULL NULL 12 val_12 NULL NULL 12 val_12 NULL NULL Index: ql/src/test/results/clientpositive/input_dynamicserde.q.out =================================================================== --- ql/src/test/results/clientpositive/input_dynamicserde.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input_dynamicserde.q.out (working copy) @@ -69,7 +69,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/969895048/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-45_190_2856482142244960659/10000 Stage: Stage-0 Move Operator @@ -84,7 +84,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1117271472/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-45_190_2856482142244960659/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -124,14 +124,24 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src_thrift POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, 
comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/587827519/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-48_543_7535676892464325154/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/587827519/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-48_543_7535676892464325154/10000 +POSTHOOK: Lineage: dest1.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] [0,0,0] ["0","0","0"] {"key_0":"value_0"} 1712634731 record_0 [1,2,3] ["10","100","1000"] {"key_1":"value_1"} 465985200 record_1 [2,4,6] ["20","200","2000"] {"key_2":"value_2"} -751827638 record_2 @@ -146,11 +156,16 @@ PREHOOK: query: SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/2127282310/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-48_589_2413193702562475374/10000 POSTHOOK: query: SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/2127282310/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-48_589_2413193702562475374/10000 +POSTHOOK: Lineage: dest1.e SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:astring, type:string, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.d SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.c SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.b SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.a SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] 0 0 NULL 1712634731 record_0 1 10 NULL 465985200 record_1 2 20 NULL -751827638 record_2 Index: ql/src/test/results/clientpositive/input9.q.out =================================================================== --- ql/src/test/results/clientpositive/input9.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input9.q.out (working copy) @@ -66,7 +66,7 @@ Move Operator files: hdfs 
directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/295573693/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-25_199_9141105823594791762/10000 Stage: Stage-0 Move Operator @@ -81,7 +81,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/2038979956/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-25_199_9141105823594791762/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -115,11 +115,15 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.value SIMPLE null[] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/407173449/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-28_674_6549398802742051982/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/407173449/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-28_674_6549398802742051982/10000 +POSTHOOK: Lineage: dest1.key SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.value SIMPLE null[] Index: ql/src/test/results/clientpositive/union17.q.out =================================================================== --- ql/src/test/results/clientpositive/union17.q.out (revision 927279) +++ ql/src/test/results/clientpositive/union17.q.out (working copy) @@ -90,7 +90,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/658847227/10004 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-28-55_421_1911254232011066301/10004 Union Reduce Output Operator key expressions: @@ -106,7 +106,7 @@ type: string expr: _col1 type: string - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/658847227/10007 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-28-55_421_1911254232011066301/10007 Union Reduce Output Operator key expressions: @@ -160,7 +160,7 @@ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/658847227/10005 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-28-55_421_1911254232011066301/10005 Reduce Output Operator key expressions: expr: _col0 @@ -212,7 +212,7 @@ Stage: Stage-5 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/658847227/10006 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-28-55_421_1911254232011066301/10006 Reduce Output Operator key expressions: expr: _col0 @@ -308,14 +308,24 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 POSTHOOK: Output: default@dest2 +POSTHOOK: Lineage: dest1.value UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, 
comment:default), ] +POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.val2 UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT DEST1.* FROM DEST1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/132652809/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-11_135_28993532720445797/10000 POSTHOOK: query: SELECT DEST1.* FROM DEST1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/132652809/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-11_135_28993532720445797/10000 +POSTHOOK: Lineage: dest1.value UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.val2 UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ] 0 1 10 1 100 1 @@ -629,11 +639,16 @@ PREHOOK: query: SELECT DEST2.* FROM DEST2 PREHOOK: type: QUERY PREHOOK: Input: default@dest2 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/21838102/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-11_180_4382720441437637638/10000 POSTHOOK: query: SELECT DEST2.* FROM DEST2 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest2 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/21838102/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-11_180_4382720441437637638/10000 +POSTHOOK: Lineage: dest1.value UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.val2 UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ] 0 val_0 1 10 val_10 1 100 val_100 1 @@ -949,8 +964,18 @@ POSTHOOK: query: drop table DEST1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.val2 UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, 
(src)s2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: drop table DEST2 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table DEST2 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest2 +POSTHOOK: Lineage: dest1.value UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.val2 UDAF null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/input30.q.out =================================================================== --- ql/src/test/results/clientpositive/input30.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input30.q.out (working copy) @@ -102,22 +102,26 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@tst_dest30 +POSTHOOK: Lineage: tst_dest30.a UDAF null[(src)src.null, ] PREHOOK: query: select * from tst_dest30 PREHOOK: type: QUERY PREHOOK: Input: default@tst_dest30 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/856509307/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-22-39_849_1510046053692273153/10000 POSTHOOK: query: select * from tst_dest30 POSTHOOK: type: QUERY POSTHOOK: Input: default@tst_dest30 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/856509307/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-22-39_849_1510046053692273153/10000 +POSTHOOK: Lineage: tst_dest30.a UDAF null[(src)src.null, ] 18 PREHOOK: query: drop table tst_dest30 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table tst_dest30 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@tst_dest30 +POSTHOOK: Lineage: tst_dest30.a UDAF null[(src)src.null, ] PREHOOK: query: drop table dest30 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table dest30 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest30 +POSTHOOK: Lineage: tst_dest30.a UDAF null[(src)src.null, ] Index: ql/src/test/results/clientpositive/udf7.q.out =================================================================== --- ql/src/test/results/clientpositive/udf7.q.out (revision 927279) +++ ql/src/test/results/clientpositive/udf7.q.out (working copy) @@ -11,6 +11,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] PREHOOK: query: EXPLAIN SELECT ROUND(LN(3.0),12), LN(0.0), LN(-1), ROUND(LOG(3.0),12), LOG(0.0), LOG(-1), ROUND(LOG2(3.0),12), LOG2(0.0), LOG2(-1), @@ -27,6 +28,7 @@ POW(2,3), POWER(2,3), POWER(2,-3), POWER(0.5, -3), POWER(4, 0.5), POWER(-1, 0.5), POWER(-1, 2) FROM dest1 POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_FUNCTION LN 3.0) 12)) (TOK_SELEXPR (TOK_FUNCTION LN 0.0)) (TOK_SELEXPR (TOK_FUNCTION LN (- 1))) (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_FUNCTION LOG 
3.0) 12)) (TOK_SELEXPR (TOK_FUNCTION LOG 0.0)) (TOK_SELEXPR (TOK_FUNCTION LOG (- 1))) (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_FUNCTION LOG2 3.0) 12)) (TOK_SELEXPR (TOK_FUNCTION LOG2 0.0)) (TOK_SELEXPR (TOK_FUNCTION LOG2 (- 1))) (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_FUNCTION LOG10 3.0) 12)) (TOK_SELEXPR (TOK_FUNCTION LOG10 0.0)) (TOK_SELEXPR (TOK_FUNCTION LOG10 (- 1))) (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_FUNCTION LOG 2 3.0) 12)) (TOK_SELEXPR (TOK_FUNCTION LOG 2 0.0)) (TOK_SELEXPR (TOK_FUNCTION LOG 2 (- 1))) (TOK_SELEXPR (TOK_FUNCTION LOG 0.5 2)) (TOK_SELEXPR (TOK_FUNCTION LOG 2 0.5)) (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_FUNCTION EXP 2.0) 12)) (TOK_SELEXPR (TOK_FUNCTION POW 2 3)) (TOK_SELEXPR (TOK_FUNCTION POWER 2 3)) (TOK_SELEXPR (TOK_FUNCTION POWER 2 (- 3))) (TOK_SELEXPR (TOK_FUNCTION POWER 0.5 (- 3))) (TOK_SELEXPR (TOK_FUNCTION POWER 4 0.5)) (TOK_SELEXPR (TOK_FUNCTION POWER (- 1) 0.5)) (TOK_SELEXPR (TOK_FUNCTION POWER (- 1) 2))))) @@ -114,7 +116,7 @@ POWER(-1, 0.5), POWER(-1, 2) FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2065510787/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-44_032_5167297332929893339/10000 POSTHOOK: query: SELECT ROUND(LN(3.0),12), LN(0.0), LN(-1), ROUND(LOG(3.0),12), LOG(0.0), LOG(-1), ROUND(LOG2(3.0),12), LOG2(0.0), LOG2(-1), ROUND(LOG10(3.0),12), LOG10(0.0), LOG10(-1), ROUND(LOG(2, 3.0),12), @@ -123,5 +125,6 @@ POWER(-1, 0.5), POWER(-1, 2) FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2065510787/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-44_032_5167297332929893339/10000 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] 1.098612288668 NULL NULL 1.098612288668 NULL NULL 1.584962500721 NULL NULL 0.47712125472 NULL NULL 1.584962500721 NULL NULL NULL -1.0 7.389056098931 8.0 8.0 0.125 8.0 2.0 NaN 1.0 Index: ql/src/test/results/clientpositive/groupby7.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby7.q.out (revision 927279) +++ ql/src/test/results/clientpositive/groupby7.q.out (working copy) @@ -22,14 +22,22 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 POSTHOOK: Output: default@dest2 +POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT DEST1.* FROM DEST1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1435060476/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-18_081_5071158744921138164/10000 POSTHOOK: query: SELECT DEST1.* FROM DEST1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1435060476/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-18_081_5071158744921138164/10000 +POSTHOOK: 
Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 0.0 10 10.0 100 200.0 @@ -342,11 +350,15 @@ PREHOOK: query: SELECT DEST2.* FROM DEST2 PREHOOK: type: QUERY PREHOOK: Input: default@dest2 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/322094219/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-18_135_1534751639114334030/10000 POSTHOOK: query: SELECT DEST2.* FROM DEST2 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest2 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/322094219/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-18_135_1534751639114334030/10000 +POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/udf_testlength.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_testlength.q.out (revision 927279) +++ ql/src/test/results/clientpositive/udf_testlength.q.out (working copy) @@ -31,14 +31,16 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.len SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1188390737/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-23-00_952_3891084452048186955/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1188390737/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-23-00_952_3891084452048186955/10000 +POSTHOOK: Lineage: dest1.len SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 7 6 7 @@ -543,3 +545,4 @@ PREHOOK: type: DROPFUNCTION POSTHOOK: query: DROP TEMPORARY FUNCTION testlength POSTHOOK: type: DROPFUNCTION +POSTHOOK: Lineage: dest1.len SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/rcfile_columnar.q.out =================================================================== --- ql/src/test/results/clientpositive/rcfile_columnar.q.out (revision 927279) +++ ql/src/test/results/clientpositive/rcfile_columnar.q.out (working copy) @@ -27,20 +27,26 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@columntable +POSTHOOK: Lineage: columntable.value SIMPLE 
null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: columntable.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: describe columnTable PREHOOK: type: DESCTABLE POSTHOOK: query: describe columnTable POSTHOOK: type: DESCTABLE +POSTHOOK: Lineage: columntable.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: columntable.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] key string from deserializer value string from deserializer PREHOOK: query: SELECT columnTable.* FROM columnTable PREHOOK: type: QUERY PREHOOK: Input: default@columntable -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/673584985/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-49-51_664_4953392597959120877/10000 POSTHOOK: query: SELECT columnTable.* FROM columnTable POSTHOOK: type: QUERY POSTHOOK: Input: default@columntable -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/673584985/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-49-51_664_4953392597959120877/10000 +POSTHOOK: Lineage: columntable.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: columntable.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 238 val_238 86 val_86 311 val_311 @@ -56,3 +62,5 @@ POSTHOOK: query: DROP TABLE columnTable POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@columntable +POSTHOOK: Lineage: columntable.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: columntable.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/groupby8_map_skew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby8_map_skew.q.out (revision 927279) +++ ql/src/test/results/clientpositive/groupby8_map_skew.q.out (working copy) @@ -83,7 +83,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1093941271/10004 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-17_493_242402874451751497/10004 Reduce Output Operator key expressions: expr: _col0 @@ -142,7 +142,7 @@ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1093941271/10005 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-17_493_242402874451751497/10005 Reduce Output Operator key expressions: expr: _col0 @@ -213,14 +213,22 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 POSTHOOK: Output: default@dest2 +POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT DEST1.* FROM DEST1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: 
file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/13414004/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-26_914_621525058414027788/10000 POSTHOOK: query: SELECT DEST1.* FROM DEST1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/13414004/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-26_914_621525058414027788/10000 +POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 1 10 1 100 1 @@ -533,11 +541,15 @@ PREHOOK: query: SELECT DEST2.* FROM DEST2 PREHOOK: type: QUERY PREHOOK: Input: default@dest2 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/853838431/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-26_967_6544390570094922860/10000 POSTHOOK: query: SELECT DEST2.* FROM DEST2 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest2 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/853838431/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-26_967_6544390570094922860/10000 +POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 1 10 1 100 1 Index: ql/src/test/results/clientpositive/sample6.q.out =================================================================== --- ql/src/test/results/clientpositive/sample6.q.out (revision 927279) +++ ql/src/test/results/clientpositive/sample6.q.out (working copy) @@ -50,7 +50,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-34_865_7458993159136131237/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-22_499_7680041421154576416/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -61,21 +61,21 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452794 + transient_lastDdlTime 1269539542 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 TotalFiles: 1 MultiFileSpray: false Needs 
Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Partition base file name: srcbucket0.txt input format: org.apache.hadoop.mapred.TextInputFormat @@ -87,12 +87,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket name srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -104,12 +104,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket name srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket name: srcbucket @@ -121,14 +121,14 @@ Move Operator files: hdfs directory: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-34_865_7458993159136131237/10002 - destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-34_865_7458993159136131237/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-22_499_7680041421154576416/10002 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-22_499_7680041421154576416/10000 Stage: Stage-0 Move Operator tables: replace: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-34_865_7458993159136131237/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-22_499_7680041421154576416/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -138,20 +138,20 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452794 + transient_lastDdlTime 1269539542 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-34_865_7458993159136131237/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-22_499_7680041421154576416/10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-34_865_7458993159136131237/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-22_499_7680041421154576416/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -165,9 +165,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-34_865_7458993159136131237/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-34_865_7458993159136131237/10002] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-22_499_7680041421154576416/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-22_499_7680041421154576416/10002] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-34_865_7458993159136131237/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-22_499_7680041421154576416/10002 Partition base file name: 10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -178,12 +178,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452794 + transient_lastDdlTime 1269539542 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -194,12 +194,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452794 + transient_lastDdlTime 1269539542 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 name: dest1 @@ -208,7 +208,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-34_865_7458993159136131237/10000 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-22_499_7680041421154576416/10000 NumFilesPerFileSink: 1 table: input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -219,12 +219,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452794 + transient_lastDdlTime 1269539542 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 TotalFiles: 1 @@ -241,14 +241,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-38_801_4822170673838042787/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-25_961_4902708404468913006/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-38_801_4822170673838042787/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-25_961_4902708404468913006/10000 +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] 468 val_469 272 val_273 448 val_449 @@ -504,6 +508,8 @@ POSTHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 4 OUT OF 4 on key) s ORDER BY key, value POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 4 4 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) @@ -550,9 +556,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket1.txt [s] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket1.txt [s] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket1.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket1.txt Partition base file name: srcbucket1.txt input format: org.apache.hadoop.mapred.TextInputFormat @@ -564,12 +570,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket name srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -581,12 +587,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket name srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket name: srcbucket @@ -595,7 +601,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-38_858_6187128523753792903/10001 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-26_021_1352123097480258241/10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -616,12 +622,14 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-39_186_6361230402783059507/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-26_264_7563015126730349410/10000 POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 4 OUT OF 4 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-39_186_6361230402783059507/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-26_264_7563015126730349410/10000 +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] 3 val_4 11 val_11 11 val_12 @@ -867,6 +875,8 @@ POSTHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s ORDER BY key, value POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 2 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) @@ -913,9 +923,9 @@ type: string Needs Tagging: false Path -> Alias: - 
file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Partition base file name: srcbucket0.txt input format: org.apache.hadoop.mapred.TextInputFormat @@ -927,12 +937,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket name srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -944,12 +954,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket name srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket name: srcbucket @@ -958,7 +968,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-42_810_6155107510252532111/10001 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-29_384_8056565871632229497/10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -979,12 +989,14 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-43_145_3963831387027634370/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-29_634_2935704185711745760/10000 POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-43_145_3963831387027634370/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-29_634_2935704185711745760/10000 +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] 0 val_0 0 val_0 0 val_0 @@ -1484,6 +1496,8 @@ POSTHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT 
OF 3 on key) s ORDER BY key, value POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 3 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) @@ -1530,9 +1544,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket [s] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket Partition base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat @@ -1544,12 +1558,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket name srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1561,12 +1575,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket name srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket name: srcbucket @@ -1575,7 +1589,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-46_893_5913931803998585116/10001 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-32_691_701636372550426278/10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1596,12 +1610,14 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-47_238_4686952558966634433/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-32_946_623307587794634291/10000 POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 3 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket -POSTHOOK: Output: 
file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-47_238_4686952558966634433/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-32_946_623307587794634291/10000 +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] 0 val_0 0 val_0 0 val_0 @@ -1944,6 +1960,8 @@ POSTHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 2 OUT OF 3 on key) s ORDER BY key, value POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 2 3 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) @@ -1990,9 +2008,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket [s] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket Partition base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat @@ -2004,12 +2022,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket name srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2021,12 +2039,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket name srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket name: srcbucket @@ -2035,7 +2053,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-50_869_837005564035845652/10001 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-36_127_8585305545394327331/10001 NumFilesPerFileSink: 1 table: 
input format: org.apache.hadoop.mapred.TextInputFormat @@ -2056,12 +2074,14 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-51_198_3705211988708424910/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-36_385_6922163519236095967/10000 POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 2 OUT OF 3 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-51_198_3705211988708424910/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-36_385_6922163519236095967/10000 +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] 1 val_2 4 val_4 4 val_5 @@ -2390,6 +2410,8 @@ POSTHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s ORDER BY key, value POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket2 (TOK_TABLESAMPLE 1 2 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) @@ -2436,10 +2458,10 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket20.txt [s] - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket22.txt [s] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket2/srcbucket20.txt [s] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket2/srcbucket22.txt [s] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket20.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket2/srcbucket20.txt Partition base file name: srcbucket20.txt input format: org.apache.hadoop.mapred.TextInputFormat @@ -2451,12 +2473,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket2 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket2 name srcbucket2 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2468,16 +2490,16 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket2 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket2 name srcbucket2 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket2 name: srcbucket2 - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket22.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket2/srcbucket22.txt Partition base file name: srcbucket22.txt input format: org.apache.hadoop.mapred.TextInputFormat @@ -2489,12 +2511,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket2 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket2 name srcbucket2 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2506,12 +2528,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket2 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket2 name srcbucket2 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket2 name: srcbucket2 @@ -2520,7 +2542,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-54_857_6047708236093309895/10001 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-39_567_4370550812166699238/10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -2541,12 +2563,14 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket2 -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-55_208_8205974269447061416/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-39_832_5367118202180137878/10000 POSTHOOK: query: SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket2 -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-55_208_8205974269447061416/10000 +POSTHOOK: Output: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-39_832_5367118202180137878/10000 +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] 0 val_0 0 val_0 0 val_0 @@ -2675,6 +2699,8 @@ POSTHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 2 OUT OF 4 on key) s ORDER BY key, value POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket2 (TOK_TABLESAMPLE 2 4 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) @@ -2721,9 +2747,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket21.txt [s] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket2/srcbucket21.txt [s] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket21.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket2/srcbucket21.txt Partition base file name: srcbucket21.txt input format: org.apache.hadoop.mapred.TextInputFormat @@ -2735,12 +2761,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket2 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket2 name srcbucket2 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2752,12 +2778,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket2 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket2 name srcbucket2 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452793 + transient_lastDdlTime 1269539541 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket2 name: srcbucket2 @@ -2766,7 +2792,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-58_926_5174735657095432369/10001 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-43_066_561805494407652044/10001 NumFilesPerFileSink: 1 table: input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -2787,12 +2813,14 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket2 -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-59_269_8451738025383627014/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-43_319_1888223782028179811/10000 POSTHOOK: query: SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 2 OUT OF 4 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket2 -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-59_269_8451738025383627014/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-43_319_1888223782028179811/10000 +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] 5 val_5 5 val_5 5 val_5 @@ -2836,12 +2864,16 @@ POSTHOOK: query: CREATE TABLE empty_bucket (key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@empty_bucket +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM empty_bucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s ORDER BY key, value PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM empty_bucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s ORDER BY key, value POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF empty_bucket (TOK_TABLESAMPLE 1 2 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) @@ -2892,7 +2924,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-03_250_8891041473469822439/10001 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-46_468_7459104464435589895/10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -2913,19 +2945,25 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@empty_bucket -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-03_557_5777663612881038098/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-46_710_6574958646531029302/10000 POSTHOOK: query: SELECT s.* FROM empty_bucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@empty_bucket -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-03_557_5777663612881038098/10000 +POSTHOOK: Output: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-46_710_6574958646531029302/10000 +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: drop table empty_bucket PREHOOK: type: DROPTABLE POSTHOOK: query: drop table empty_bucket POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@empty_bucket +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: drop table dest1 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table dest1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] Index: ql/src/test/results/clientpositive/join27.q.out =================================================================== --- ql/src/test/results/clientpositive/join27.q.out (revision 927279) +++ ql/src/test/results/clientpositive/join27.q.out (working copy) @@ -146,7 +146,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1309444189/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-26_303_1764323642543487418/10000 Stage: Stage-0 Move Operator @@ -161,7 +161,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1401557787/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-26_303_1764323642543487418/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -201,14 +201,20 @@ POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 POSTHOOK: Output: default@dest_j1 +POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: select * from dest_j1 x order by x.key, x.value PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1599793169/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-30_149_7577292105541728128/10000 POSTHOOK: query: select * from dest_j1 x order by x.key, x.value POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1599793169/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-30_149_7577292105541728128/10000 +POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] 
NULL val_165 val_165 NULL val_165 val_165 NULL val_193 val_193 @@ -255,3 +261,6 @@ POSTHOOK: query: drop table dest_j1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest_j1 +POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/udf_length.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_length.q.out (revision 927279) +++ ql/src/test/results/clientpositive/udf_length.q.out (working copy) @@ -58,7 +58,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1477901975/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-17-33_669_3485124574378219493/10000 Stage: Stage-0 Move Operator @@ -73,7 +73,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/2112708901/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-17-33_669_3485124574378219493/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -103,14 +103,16 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src1 POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1063857688/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-17-36_890_8095203762015669935/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1063857688/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-17-36_890_8095203762015669935/10000 +POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] 7 0 7 @@ -141,6 +143,7 @@ POSTHOOK: query: DROP TABLE dest1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: -- Test with non-ascii characters. 
CREATE TABLE dest1(name STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE @@ -148,15 +151,18 @@ CREATE TABLE dest1(name STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE dest1 PREHOOK: type: LOAD POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE dest1 POSTHOOK: type: LOAD POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: EXPLAIN SELECT length(dest1.name) FROM dest1 PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT length(dest1.name) FROM dest1 POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION length (. (TOK_TABLE_OR_COL dest1) name)))))) @@ -191,14 +197,16 @@ PREHOOK: query: SELECT length(dest1.name) FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/249846152/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-17-37_253_4676866482162181587/10000 POSTHOOK: query: SELECT length(dest1.name) FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/249846152/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-17-37_253_4676866482162181587/10000 +POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] 2 PREHOOK: query: DROP TABLE dest1 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE dest1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/union12.q.out =================================================================== --- ql/src/test/results/clientpositive/union12.q.out (revision 927279) +++ ql/src/test/results/clientpositive/union12.q.out (working copy) @@ -84,7 +84,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1908722862/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-27-52_161_7061820698497935894/10002 Union Select Operator expressions: @@ -108,7 +108,7 @@ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable - file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1908722862/10004 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-27-52_161_7061820698497935894/10004 Union Select Operator expressions: @@ -132,7 +132,7 @@ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable - file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1908722862/10005 + 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-27-52_161_7061820698497935894/10005 Union Select Operator expressions: @@ -164,7 +164,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1028672484/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-27-52_161_7061820698497935894/10000 Stage: Stage-0 Move Operator @@ -179,7 +179,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1908722862/10003 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-27-52_161_7061820698497935894/10003 Reduce Output Operator sort order: Map-reduce partition columns: @@ -305,14 +305,18 @@ POSTHOOK: Input: default@src1 POSTHOOK: Input: default@srcbucket POSTHOOK: Output: default@tmptable +POSTHOOK: Lineage: tmptable.value SET null[(src)s1.null, (src1)s2.null, (srcbucket)s3.null, ] +POSTHOOK: Lineage: tmptable.key SET null[] PREHOOK: query: select * from tmptable x sort by x.key PREHOOK: type: QUERY PREHOOK: Input: default@tmptable -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/65730204/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-28-08_387_2056401867430947579/10000 POSTHOOK: query: select * from tmptable x sort by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@tmptable -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/65730204/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-28-08_387_2056401867430947579/10000 +POSTHOOK: Lineage: tmptable.value SET null[(src)s1.null, (src1)s2.null, (srcbucket)s3.null, ] +POSTHOOK: Lineage: tmptable.key SET null[] tst1 500 tst2 25 tst3 1000 @@ -321,3 +325,5 @@ POSTHOOK: query: drop table tmptable POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@tmptable +POSTHOOK: Lineage: tmptable.value SET null[(src)s1.null, (src1)s2.null, (srcbucket)s3.null, ] +POSTHOOK: Lineage: tmptable.key SET null[] Index: ql/src/test/results/clientpositive/groupby1_map_nomap.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby1_map_nomap.q.out (revision 927279) +++ ql/src/test/results/clientpositive/groupby1_map_nomap.q.out (working copy) @@ -103,14 +103,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/156800012/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-22_434_7156952637549765882/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/156800012/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-22_434_7156952637549765882/10000 
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/udf2.q.out =================================================================== --- ql/src/test/results/clientpositive/udf2.q.out (revision 927279) +++ ql/src/test/results/clientpositive/udf2.q.out (working copy) @@ -11,12 +11,14 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] PREHOOK: query: EXPLAIN SELECT '|', trim(dest1.c1), '|', rtrim(dest1.c1), '|', ltrim(dest1.c1), '|' FROM dest1 PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT '|', trim(dest1.c1), '|', rtrim(dest1.c1), '|', ltrim(dest1.c1), '|' FROM dest1 POSTHOOK: type: QUERY +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR '|') (TOK_SELEXPR (TOK_FUNCTION trim (. (TOK_TABLE_OR_COL dest1) c1))) (TOK_SELEXPR '|') (TOK_SELEXPR (TOK_FUNCTION rtrim (. (TOK_TABLE_OR_COL dest1) c1))) (TOK_SELEXPR '|') (TOK_SELEXPR (TOK_FUNCTION ltrim (. (TOK_TABLE_OR_COL dest1) c1))) (TOK_SELEXPR '|')))) @@ -63,9 +65,10 @@ PREHOOK: query: SELECT '|', trim(dest1.c1), '|', rtrim(dest1.c1), '|', ltrim(dest1.c1), '|' FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1681092465/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-09-56_792_6463159510198536584/10000 POSTHOOK: query: SELECT '|', trim(dest1.c1), '|', rtrim(dest1.c1), '|', ltrim(dest1.c1), '|' FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1681092465/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-09-56_792_6463159510198536584/10000 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] | abc | abc | abc | Index: ql/src/test/results/clientpositive/join36.q.out =================================================================== --- ql/src/test/results/clientpositive/join36.q.out (revision 927279) +++ ql/src/test/results/clientpositive/join36.q.out (working copy) @@ -35,6 +35,8 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@tmp1 +POSTHOOK: Lineage: tmp1.cnt UDAF null[(src)src.null, ] +POSTHOOK: Lineage: tmp1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: INSERT OVERWRITE TABLE tmp2 SELECT key, count(1) from src group by key PREHOOK: type: QUERY @@ -45,6 +47,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@tmp2 +POSTHOOK: Lineage: tmp2.cnt UDAF null[(src)src.null, ] +POSTHOOK: Lineage: tmp2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp1.cnt UDAF null[(src)src.null, ] +POSTHOOK: Lineage: tmp1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(x) */ x.key, x.cnt, y.cnt @@ -55,6 +61,10 @@ SELECT /*+ MAPJOIN(x) */ x.key, x.cnt, y.cnt FROM tmp1 x JOIN tmp2 y ON (x.key = y.key) POSTHOOK: type: QUERY +POSTHOOK: Lineage: tmp2.cnt UDAF null[(src)src.null, ] 
+POSTHOOK: Lineage: tmp2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp1.cnt UDAF null[(src)src.null, ] +POSTHOOK: Lineage: tmp1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF tmp1 x) (TOK_TABREF tmp2 y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest_j1)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST x))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) cnt)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL y) cnt))))) @@ -166,7 +176,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/959558219/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-36_269_7387586097298017144/10000 Stage: Stage-0 Move Operator @@ -181,7 +191,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1816987590/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-36_269_7387586097298017144/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -221,14 +231,28 @@ POSTHOOK: Input: default@tmp2 POSTHOOK: Input: default@tmp1 POSTHOOK: Output: default@dest_j1 +POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(tmp2)y.FieldSchema(name:cnt, type:int, comment:null), ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(tmp1)x.FieldSchema(name:cnt, type:int, comment:null), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(tmp1)x.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: tmp2.cnt UDAF null[(src)src.null, ] +POSTHOOK: Lineage: tmp2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp1.cnt UDAF null[(src)src.null, ] +POSTHOOK: Lineage: tmp1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1839162600/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-39_890_7413429018729989506/10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1839162600/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-39_890_7413429018729989506/10000 +POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(tmp2)y.FieldSchema(name:cnt, type:int, comment:null), ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(tmp1)x.FieldSchema(name:cnt, type:int, comment:null), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(tmp1)x.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: tmp2.cnt UDAF null[(src)src.null, ] +POSTHOOK: Lineage: tmp2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp1.cnt UDAF null[(src)src.null, ] +POSTHOOK: Lineage: tmp1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 3 3 2 1 1 4 1 1 @@ -543,3 +567,10 @@ POSTHOOK: query: drop table dest_j1 POSTHOOK: type: DROPTABLE 
POSTHOOK: Output: default@dest_j1 +POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(tmp2)y.FieldSchema(name:cnt, type:int, comment:null), ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(tmp1)x.FieldSchema(name:cnt, type:int, comment:null), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(tmp1)x.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: tmp2.cnt UDAF null[(src)src.null, ] +POSTHOOK: Lineage: tmp2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmp1.cnt UDAF null[(src)src.null, ] +POSTHOOK: Lineage: tmp1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/input18.q.out =================================================================== --- ql/src/test/results/clientpositive/input18.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input18.q.out (working copy) @@ -125,14 +125,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/542338951/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-21-07_602_8001169924488374374/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/542338951/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-21-07_602_8001169924488374374/10000 +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0+3+7 0 val_0+3+7 0 val_0+3+7 Index: ql/src/test/results/clientpositive/groupby2.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby2.q.out (revision 927279) +++ ql/src/test/results/clientpositive/groupby2.q.out (working copy) @@ -67,7 +67,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/2060806095/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-39_204_7135729940660247827/10002 Reduce Output Operator key expressions: expr: _col0 @@ -141,14 +141,20 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest_g2 +POSTHOOK: Lineage: dest_g2.c2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_g2.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_g2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT dest_g2.* 
FROM dest_g2 PREHOOK: type: QUERY PREHOOK: Input: default@dest_g2 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1250974216/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-45_685_5627051348560133811/10000 POSTHOOK: query: SELECT dest_g2.* FROM dest_g2 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_g2 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1250974216/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-45_685_5627051348560133811/10000 +POSTHOOK: Lineage: dest_g2.c2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_g2.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_g2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 1 00.0 1 71 116414.0 2 69 225571.0 Index: ql/src/test/results/clientpositive/input_part5.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part5.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input_part5.q.out (working copy) @@ -67,7 +67,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1914556139/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-52_283_7057089470780935225/10000 Stage: Stage-0 Move Operator @@ -82,7 +82,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1956159984/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-52_283_7057089470780935225/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -122,14 +122,22 @@ POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 POSTHOOK: Output: default@tmptable +POSTHOOK: Lineage: tmptable.ds SIMPLE null[(srcpart)x.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: tmptable.hr SIMPLE null[(srcpart)x.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmptable.value SIMPLE null[(srcpart)x.FieldSchema(name:hr, type:string, comment:null), ] +POSTHOOK: Lineage: tmptable.key SIMPLE null[(srcpart)x.FieldSchema(name:ds, type:string, comment:null), ] PREHOOK: query: select * from tmptable x sort by x.key,x.value,x.ds,x.hr PREHOOK: type: QUERY PREHOOK: Input: default@tmptable -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1308089545/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-58_790_1170703352037506248/10000 POSTHOOK: query: select * from tmptable x sort by x.key,x.value,x.ds,x.hr POSTHOOK: type: QUERY POSTHOOK: Input: default@tmptable -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1308089545/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-58_790_1170703352037506248/10000 +POSTHOOK: Lineage: tmptable.ds SIMPLE null[(srcpart)x.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: tmptable.hr SIMPLE 
null[(srcpart)x.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmptable.value SIMPLE null[(srcpart)x.FieldSchema(name:hr, type:string, comment:null), ] +POSTHOOK: Lineage: tmptable.key SIMPLE null[(srcpart)x.FieldSchema(name:ds, type:string, comment:null), ] 0 val_0 2008-04-08 11 0 val_0 2008-04-08 11 0 val_0 2008-04-08 11 @@ -303,3 +311,7 @@ POSTHOOK: query: drop table tmptable POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@tmptable +POSTHOOK: Lineage: tmptable.ds SIMPLE null[(srcpart)x.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: tmptable.hr SIMPLE null[(srcpart)x.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tmptable.value SIMPLE null[(srcpart)x.FieldSchema(name:hr, type:string, comment:null), ] +POSTHOOK: Lineage: tmptable.key SIMPLE null[(srcpart)x.FieldSchema(name:ds, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/bucketmapjoin3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin3.q.out (revision 927279) +++ ql/src/test/results/clientpositive/bucketmapjoin3.q.out (working copy) @@ -164,7 +164,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-24_901_6063628844240471022/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-27_799_2287581298987186393/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -175,12 +175,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268348964 + transient_lastDdlTime 1269536667 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -238,7 +238,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-24_901_6063628844240471022/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-27_799_2287581298987186393/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -249,12 +249,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348964
+ transient_lastDdlTime 1269536667
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -263,12 +263,12 @@
Alias Bucket Base File Name Mapping:
b {srcbucket22.txt=[srcbucket20.txt, srcbucket22.txt], srcbucket23.txt=[srcbucket21.txt, srcbucket23.txt]}
Alias Bucket File Name Mapping:
- b {file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]}
+ b {file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]}
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [a]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [a]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08
Partition
base file name: ds=2008-04-08
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -282,13 +282,13 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
name srcbucket_mapjoin_part_2
partition_columns ds
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348964
+ transient_lastDdlTime 1269536667
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -300,13 +300,13 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2
name srcbucket_mapjoin_part_2
partition_columns ds
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348964
+ transient_lastDdlTime 1269536667
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket_mapjoin_part_2
name: srcbucket_mapjoin_part_2
@@ -318,14 +318,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-24_901_6063628844240471022/10002
- destination: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-24_901_6063628844240471022/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-27_799_2287581298987186393/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-27_799_2287581298987186393/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-24_901_6063628844240471022/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-27_799_2287581298987186393/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -335,20 +335,20 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348964
+ transient_lastDdlTime 1269536667
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
- tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-24_901_6063628844240471022/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-27_799_2287581298987186393/10001
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-24_901_6063628844240471022/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-27_799_2287581298987186393/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -364,9 +364,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-24_901_6063628844240471022/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-24_901_6063628844240471022/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-27_799_2287581298987186393/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-27_799_2287581298987186393/10002]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-24_901_6063628844240471022/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-27_799_2287581298987186393/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -377,12 +377,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348964
+ transient_lastDdlTime 1269536667
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -393,12 +393,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348964
+ transient_lastDdlTime 1269536667
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
name: bucketmapjoin_tmp_result
@@ -407,7 +407,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-24_901_6063628844240471022/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-27_799_2287581298987186393/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -418,12 +418,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348964
+ transient_lastDdlTime 1269536667
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -446,14 +446,20 @@
POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-39_901_7647152158354564599/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-34_815_1331011257051516961/10000
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-09-39_901_7647152158354564599/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-34_815_1331011257051516961/10000
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
564
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_1 select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
@@ -465,6 +471,12 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
POSTHOOK: Output: default@bucketmapjoin_hash_result_1
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(b)*/ a.key, a.value, b.value from srcbucket_mapjoin_part_2 a join srcbucket_mapjoin_part b
@@ -481,14 +493,32 @@
POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-13_576_719486557407694216/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-47_307_1293011141352202006/10000
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-13_576_719486557407694216/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-47_307_1293011141352202006/10000
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
564
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_2 select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
@@ -500,20 +530,44 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
POSTHOOK: Output: default@bucketmapjoin_hash_result_2
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
PREHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_hash_result_2
PREHOOK: Input: default@bucketmapjoin_hash_result_1
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-31_144_5464392401940092975/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-53_468_5149568277616100357/10000
POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_hash_result_2
POSTHOOK: Input: default@bucketmapjoin_hash_result_1
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-31_144_5464392401940092975/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-53_468_5149568277616100357/10000
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
0 0 0
PREHOOK: query: explain extended insert overwrite table bucketmapjoin_tmp_result
@@ -527,6 +581,18 @@
from srcbucket_mapjoin_part_2 a join srcbucket_mapjoin_part b
on a.key=b.key and b.ds="2008-04-08" and a.ds="2008-04-08"
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF srcbucket_mapjoin_part_2 a) (TOK_TABREF srcbucket_mapjoin_part b) (and (and (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (= (. (TOK_TABLE_OR_COL b) ds) "2008-04-08")) (= (. (TOK_TABLE_OR_COL a) ds) "2008-04-08")))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB bucketmapjoin_tmp_result)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value)))))
@@ -587,7 +653,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-41_922_1679197703118787011/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-56_818_8486870360460522841/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -598,12 +664,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348964
+ transient_lastDdlTime 1269536667
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -661,7 +727,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-41_922_1679197703118787011/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-56_818_8486870360460522841/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -672,12 +738,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348964
+ transient_lastDdlTime 1269536667
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -686,12 +752,12 @@
Alias Bucket Base File Name Mapping:
a {srcbucket20.txt=[srcbucket22.txt], srcbucket21.txt=[srcbucket23.txt], srcbucket22.txt=[srcbucket22.txt], srcbucket23.txt=[srcbucket23.txt]}
Alias Bucket File Name Mapping:
- a {file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]}
+ a {file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]}
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08
Partition
base file name: ds=2008-04-08
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -705,13 +771,13 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part
name srcbucket_mapjoin_part
partition_columns ds
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348963
+ transient_lastDdlTime 1269536666
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -723,13 +789,13 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin_part
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin_part
name srcbucket_mapjoin_part
partition_columns ds
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348963
+ transient_lastDdlTime 1269536666
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket_mapjoin_part
name: srcbucket_mapjoin_part
@@ -741,14 +807,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-41_922_1679197703118787011/10002
- destination: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-41_922_1679197703118787011/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-56_818_8486870360460522841/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-56_818_8486870360460522841/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-41_922_1679197703118787011/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-56_818_8486870360460522841/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -758,20 +824,20 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348964
+ transient_lastDdlTime 1269536667
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
- tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-41_922_1679197703118787011/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-56_818_8486870360460522841/10001
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-41_922_1679197703118787011/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-56_818_8486870360460522841/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -787,9 +853,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-41_922_1679197703118787011/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-41_922_1679197703118787011/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-56_818_8486870360460522841/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-56_818_8486870360460522841/10002]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-41_922_1679197703118787011/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-56_818_8486870360460522841/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -800,12 +866,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348964
+ transient_lastDdlTime 1269536667
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -816,12 +882,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348964
+ transient_lastDdlTime 1269536667
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
name: bucketmapjoin_tmp_result
@@ -830,7 +896,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-41_922_1679197703118787011/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-04-56_818_8486870360460522841/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -841,12 +907,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268348964
+ transient_lastDdlTime 1269536667
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -869,14 +935,44 @@
POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-57_875_3330503946722950502/10000
+PREHOOK: Output:
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-04_688_3533514539906155061/10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-10-57_875_3330503946722950502/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-04_688_3533514539906155061/10000 +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] 564 PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_2 select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result @@ -888,6 +984,24 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result POSTHOOK: Output: default@bucketmapjoin_hash_result_2 +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF 
null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin_part_2 a join srcbucket_mapjoin_part b
@@ -904,14 +1018,56 @@
POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-35_292_959880905039693104/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-19_246_7287770550817535581/10000
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-35_292_959880905039693104/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-19_246_7287770550817535581/10000
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
564
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_2 select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
@@ -923,48 +1079,240 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
POSTHOOK: Output: default@bucketmapjoin_hash_result_2
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_hash_result_2
PREHOOK: Input: default@bucketmapjoin_hash_result_1
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-48_717_4508199287729084294/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-25_519_7228376767696362825/10000
POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_hash_result_2
POSTHOOK: Input: default@bucketmapjoin_hash_result_1
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-48_717_4508199287729084294/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-25_519_7228376767696362825/10000
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
0 0 0
PREHOOK: query: drop table bucketmapjoin_hash_result_2
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table bucketmapjoin_hash_result_2
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucketmapjoin_hash_result_2
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: drop table bucketmapjoin_hash_result_1
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table bucketmapjoin_hash_result_1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucketmapjoin_hash_result_1
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: drop table bucketmapjoin_tmp_result
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table bucketmapjoin_tmp_result
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key,
type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: drop table srcbucket_mapjoin PREHOOK: type: DROPTABLE POSTHOOK: query: drop table srcbucket_mapjoin POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@srcbucket_mapjoin +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF 
null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: drop table srcbucket_mapjoin_part PREHOOK: type: DROPTABLE POSTHOOK: query: drop table srcbucket_mapjoin_part POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@srcbucket_mapjoin_part +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: 
bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: drop table srcbucket_mapjoin_part_2 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table srcbucket_mapjoin_part_2 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@srcbucket_mapjoin_part_2 +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] 
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin_part)b.FieldSchema(name:key, type:int, comment:null), ]
Index: ql/src/test/results/clientpositive/input_part10.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_part10.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/input_part10.q.out (working copy)
@@ -89,27 +89,35 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@part_special@ds=2008 04 08/ts=10%3A11%3A12%3D455
+POSTHOOK: Lineage: part_special PARTITION(ds=2008 04 08,ts=10:11:12=455).b SIMPLE null[]
+POSTHOOK: Lineage: part_special PARTITION(ds=2008 04 08,ts=10:11:12=455).a SIMPLE null[]
PREHOOK: query: DESCRIBE EXTENDED part_special PARTITION(ds='2008 04 08', ts = '10:11:12=455')
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE EXTENDED part_special PARTITION(ds='2008 04 08', ts = '10:11:12=455')
POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: part_special PARTITION(ds=2008 04 08,ts=10:11:12=455).b SIMPLE null[]
+POSTHOOK: Lineage: part_special PARTITION(ds=2008 04 08,ts=10:11:12=455).a SIMPLE null[]
a string
b string
ds string
ts string
-Detailed Partition Information Partition(values:[2008 04 08, 10:11:12=455], dbName:default, tableName:part_special, createTime:1256125568, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/part_special/ds=2008 04 08/ts=10%3A11%3A12%3D455, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1256125568})
+Detailed Partition Information Partition(values:[2008 04 08, 10:11:12=455], dbName:default, tableName:part_special, createTime:1269537986, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/part_special/ds=2008 04 08/ts=10%3A11%3A12%3D455, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1269537986})
PREHOOK: query: SELECT * FROM part_special WHERE ds='2008 04 08' AND ts = '10:11:12=455'
PREHOOK: type: QUERY
PREHOOK: Input: default@part_special@ds=2008 04 08/ts=10%3A11%3A12%3D455
-PREHOOK: Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1606481639/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-26_624_4753762982954764466/10000
POSTHOOK: query: SELECT * FROM part_special WHERE ds='2008 04 08' AND ts = '10:11:12=455'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@part_special@ds=2008 04 08/ts=10%3A11%3A12%3D455
-POSTHOOK: Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1606481639/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-26_624_4753762982954764466/10000
+POSTHOOK: Lineage: part_special PARTITION(ds=2008 04 08,ts=10:11:12=455).b SIMPLE null[]
+POSTHOOK: Lineage: part_special PARTITION(ds=2008 04 08,ts=10:11:12=455).a SIMPLE null[]
1 2 2008 04 08 10:11:12=455
PREHOOK: query: DROP TABLE part_special
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE part_special
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@part_special
+POSTHOOK: Lineage: part_special PARTITION(ds=2008 04 08,ts=10:11:12=455).b SIMPLE null[]
+POSTHOOK: Lineage: part_special PARTITION(ds=2008 04 08,ts=10:11:12=455).a SIMPLE null[]
Index: ql/src/test/results/clientpositive/partition_vs_table_metadata.q.out
===================================================================
--- ql/src/test/results/clientpositive/partition_vs_table_metadata.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/partition_vs_table_metadata.q.out (working copy)
@@ -15,12 +15,16 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@partition_vs_table@ds=100
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: alter table partition_vs_table add columns (newcol string)
PREHOOK: type: ALTERTABLE_ADDCOLS
POSTHOOK: query: alter table partition_vs_table add columns (newcol string)
POSTHOOK: type: ALTERTABLE_ADDCOLS
POSTHOOK: Input: default@partition_vs_table
POSTHOOK: Output: default@partition_vs_table
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: insert overwrite table partition_vs_table partition(ds='101') select key, value, key from src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
@@ -29,18 +33,28 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@partition_vs_table@ds=101
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=101).newcol SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: select key, value, newcol from partition_vs_table order by key
PREHOOK: type: QUERY
PREHOOK: Input: default@partition_vs_table@ds=100
PREHOOK: Input: default@partition_vs_table@ds=101
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/tmp/1273555868/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-45-33_414_1904360188470653161/10000
POSTHOOK: query: select key, value, newcol from partition_vs_table order by key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@partition_vs_table@ds=100
POSTHOOK: Input: default@partition_vs_table@ds=101
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/tmp/1273555868/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-45-33_414_1904360188470653161/10000
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=101).newcol SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 val_0 NULL
0 val_0 NULL
0 val_0 NULL
@@ -1046,3 +1060,8 @@
POSTHOOK: query: drop table partition_vs_table
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@partition_vs_table
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=101).newcol SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=101).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=101).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=100).value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_vs_table PARTITION(ds=100).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/insertexternal1.q.out
===================================================================
--- ql/src/test/results/clientpositive/insertexternal1.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/insertexternal1.q.out (working copy)
@@ -20,14 +20,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@texternal@insertdate=2008-01-01
+POSTHOOK: Lineage: texternal PARTITION(insertdate=2008-01-01).val SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: texternal PARTITION(insertdate=2008-01-01).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: select * from texternal where insertdate='2008-01-01'
PREHOOK: type: QUERY
PREHOOK: Input: default@texternal@insertdate=2008-01-01
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1173088928/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-28-45_156_818472487968944372/10000
POSTHOOK: query: select * from texternal where insertdate='2008-01-01'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@texternal@insertdate=2008-01-01
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1173088928/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-28-45_156_818472487968944372/10000
+POSTHOOK: Lineage: texternal PARTITION(insertdate=2008-01-01).val SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: texternal PARTITION(insertdate=2008-01-01).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
238 val_238 2008-01-01
86 val_86 2008-01-01
311 val_311 2008-01-01
Index: ql/src/test/results/clientpositive/mapreduce8.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapreduce8.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/mapreduce8.q.out (working copy)
@@ -132,14 +132,26 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.one SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.ten SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.v SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.k SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/1655540627/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-38_966_3282323259424956418/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/1655540627/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-38_966_3282323259424956418/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.one SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.ten SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.v SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.k SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
0 val_0 0 0 0 val_0
0 val_0 0 0 0 val_0
0 val_0 0 0 0 val_0
Index: ql/src/test/results/clientpositive/sample1.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample1.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/sample1.q.out (working copy)
@@ -72,7 +72,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-46_652_382701724158806219/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-44_106_4347122727196992888/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -83,21 +83,21 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452746
+ transient_lastDdlTime 1269539504
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
TotalFiles: 1
MultiFileSpray: false
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [s]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [s]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -111,13 +111,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452743
+ transient_lastDdlTime 1269539502
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -128,13 +128,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
name srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452743
+ transient_lastDdlTime 1269539502
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
name: srcpart
@@ -146,14 +146,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-46_652_382701724158806219/10002
- destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-46_652_382701724158806219/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-44_106_4347122727196992888/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-44_106_4347122727196992888/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-46_652_382701724158806219/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-44_106_4347122727196992888/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -163,20 +163,20 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452746
+ transient_lastDdlTime 1269539504
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-46_652_382701724158806219/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-44_106_4347122727196992888/10001
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-46_652_382701724158806219/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-44_106_4347122727196992888/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -194,9 +194,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-46_652_382701724158806219/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-46_652_382701724158806219/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-44_106_4347122727196992888/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-44_106_4347122727196992888/10002]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-46_652_382701724158806219/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-44_106_4347122727196992888/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -207,12 +207,12 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452746
+ transient_lastDdlTime 1269539504
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -223,12 +223,12 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452746
+ transient_lastDdlTime 1269539504
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
name: dest1
@@ -237,7 +237,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-46_652_382701724158806219/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-44_106_4347122727196992888/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -248,12 +248,12 @@
columns.types int:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452746
+ transient_lastDdlTime 1269539504
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
TotalFiles: 1
@@ -272,14 +272,22 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.hr SIMPLE null[(srcpart)s.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.dt SIMPLE null[(srcpart)s.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE null[(srcpart)s.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)s.FieldSchema(name:ds, type:string, comment:null), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-51_060_6501845406677175733/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-47_988_8065110656696287432/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-51_060_6501845406677175733/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-47_988_8065110656696287432/10000
+POSTHOOK: Lineage: dest1.hr SIMPLE null[(srcpart)s.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.dt SIMPLE null[(srcpart)s.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE null[(srcpart)s.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)s.FieldSchema(name:ds, type:string, comment:null), ]
238 val_238 2008-04-08 11
86 val_86 2008-04-08 11
311 val_311 2008-04-08 11
@@ -783,9 +791,13 @@
PREHOOK: query: select count(1) from srcbucket
PREHOOK: type: QUERY
PREHOOK: Input: default@srcbucket
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-51_122_3816438066746502163/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-48_041_950976851312719523/10000
POSTHOOK: query: select count(1) from srcbucket
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcbucket
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-51_122_3816438066746502163/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-48_041_950976851312719523/10000
+POSTHOOK: Lineage: dest1.hr SIMPLE null[(srcpart)s.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.dt SIMPLE null[(srcpart)s.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE null[(srcpart)s.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)s.FieldSchema(name:ds, type:string, comment:null), ]
1000
Index: ql/src/test/results/clientpositive/groupby6_map_skew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby6_map_skew.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby6_map_skew.q.out (working copy)
@@ -65,7 +65,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/311461407/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-51_993_485239942609586571/10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -118,14 +118,16 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/857829651/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-58_230_6094825394639152675/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/857829651/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-58_230_6094825394639152675/10000
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
0
1
2
Index: ql/src/test/results/clientpositive/groupby1_limit.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby1_limit.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby1_limit.q.out (working copy)
@@ -80,7 +80,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/780128058/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-04_461_2527868150155629277/10002
Reduce Output Operator
sort order:
tag: -1
@@ -127,14 +127,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1678285663/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-10_816_1860844427701721484/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1678285663/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-10_816_1860844427701721484/10000
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 0.0
10 10.0
100 200.0
Index: ql/src/test/results/clientpositive/cast1.q.out
===================================================================
--- ql/src/test/results/clientpositive/cast1.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/cast1.q.out (working copy)
@@ -67,7 +67,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1062243767/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-55_273_3044982737964031435/10000
Stage: Stage-0
Move Operator
@@ -82,7 +82,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/493733948/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-55_273_3044982737964031435/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -124,12 +124,26 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c7 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c6 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c5 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c4 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c3 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c2 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
PREHOOK: query: select dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1057577384/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-58_977_9095738418569007701/10000
POSTHOOK: query: select dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1057577384/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-58_977_9095738418569007701/10000
+POSTHOOK: Lineage: dest1.c7 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c6 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c5 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c4 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c3 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c2 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
5 5.0 5.0 5.0 5 true 1
Index: ql/src/test/results/clientpositive/ppd_constant_expr.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_constant_expr.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/ppd_constant_expr.q.out (working copy)
@@ -62,7 +62,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1294553801/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-46-38_943_4769732990628638651/10000
Stage: Stage-0
Move Operator
@@ -77,7 +77,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1450499892/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-46-38_943_4769732990628638651/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -113,14 +113,20 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src1
POSTHOOK: Output: default@ppd_constant_expr
+POSTHOOK: Lineage: ppd_constant_expr.c3 SIMPLE null[]
+POSTHOOK: Lineage: ppd_constant_expr.c2 SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: ppd_constant_expr.c1 SIMPLE null[]
PREHOOK: query: SELECT ppd_constant_expr.* FROM ppd_constant_expr
PREHOOK: type: QUERY
PREHOOK: Input: default@ppd_constant_expr
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1399722106/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-46-42_386_4279889211210642063/10000
POSTHOOK: query: SELECT ppd_constant_expr.* FROM ppd_constant_expr
POSTHOOK: type: QUERY
POSTHOOK: Input: default@ppd_constant_expr
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1399722106/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-46-42_386_4279889211210642063/10000
+POSTHOOK: Lineage: ppd_constant_expr.c3 SIMPLE null[]
+POSTHOOK: Lineage: ppd_constant_expr.c2 SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: ppd_constant_expr.c1 SIMPLE null[]
NULL NULL NULL
NULL NULL NULL
NULL NULL NULL
@@ -151,3 +157,6 @@
POSTHOOK: query: DROP TABLE ppd_constant_expr
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@ppd_constant_expr
+POSTHOOK: Lineage: ppd_constant_expr.c3 SIMPLE null[]
+POSTHOOK: Lineage: ppd_constant_expr.c2 SIMPLE null[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: ppd_constant_expr.c1 SIMPLE null[]
Index: ql/src/test/results/clientpositive/combine1.q.out
===================================================================
--- ql/src/test/results/clientpositive/combine1.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/combine1.q.out (working copy)
@@ -17,14 +17,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@combine1_1
+POSTHOOK: Lineage: combine1_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: combine1_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: select key, value from combine1_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@combine1_1
-PREHOOK: Output: file:/data/users/njain/hive3/hive3/build/ql/scratchdir/hive_2010-03-12_14-00-56_188_1241439225254397610/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-17_322_3761041260588703533/10000
 POSTHOOK: query: select key, value from combine1_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@combine1_1
-POSTHOOK: Output: file:/data/users/njain/hive3/hive3/build/ql/scratchdir/hive_2010-03-12_14-00-56_188_1241439225254397610/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-17_322_3761041260588703533/10000
+POSTHOOK: Lineage: combine1_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: combine1_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 238	val_238
 86	val_86
 311	val_311
@@ -530,3 +534,5 @@
 POSTHOOK: query: drop table combine1_1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@combine1_1
+POSTHOOK: Lineage: combine1_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: combine1_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/groupby2_noskew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby2_noskew.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/groupby2_noskew.q.out	(working copy)
@@ -102,14 +102,20 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest_g2
+POSTHOOK: Lineage: dest_g2.c2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_g2.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_g2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: SELECT dest_g2.* FROM dest_g2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest_g2
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1769464573/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-11_869_7765594445282049335/10000
 POSTHOOK: query: SELECT dest_g2.* FROM dest_g2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest_g2
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1769464573/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-11_869_7765594445282049335/10000
+POSTHOOK: Lineage: dest_g2.c2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_g2.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_g2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 0	1	00.0
 1	71	116414.0
 2	69	225571.0
Index: ql/src/test/results/clientpositive/join31.q.out
===================================================================
--- ql/src/test/results/clientpositive/join31.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/join31.q.out	(working copy)
@@ -89,7 +89,7 @@
  Stage: Stage-2
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1198542805/10002
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-18_358_6510234074713122644/10002
          Common Join Operator
            condition map:
                 Inner Join 0 to 1
@@ -111,11 +111,11 @@
      Local Work:
        Map Reduce Local Work
          Alias -> Map Local Tables:
-          file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1198542805/10004
+          file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-18_358_6510234074713122644/10004
            Fetch Operator
              limit: -1
          Alias -> Map Local Operator Tree:
-          file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1198542805/10004
+          file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-18_358_6510234074713122644/10004
            Common Join Operator
              condition map:
                   Inner Join 0 to 1
@@ -138,7 +138,7 @@
  Stage: Stage-3
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1198542805/10003
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-18_358_6510234074713122644/10003
          Select Operator
            expressions:
                  expr: _col0
@@ -286,14 +286,18 @@
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@src1
 POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.cnt UDAF null[(src1)x.null, (src)y.null, ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: select * from dest_j1 x order by x.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/2071762334/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-31_107_4046142146557347168/10000
 POSTHOOK: query: select * from dest_j1 x order by x.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/2071762334/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-31_107_4046142146557347168/10000
+POSTHOOK: Lineage: dest_j1.cnt UDAF null[(src1)x.null, (src)y.null, ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
 128	1
 146	1
 150	1
@@ -314,3 +318,5 @@
 POSTHOOK: query: drop TABLE dest_j1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.cnt UDAF null[(src1)x.null, (src)y.null, ]
+POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/input13.q.out
===================================================================
--- ql/src/test/results/clientpositive/input13.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/input13.q.out	(working copy)
@@ -153,7 +153,7 @@
     Move Operator
       files:
           hdfs directory: true
-          destination: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/192420524/10000
+          destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-22_134_7701660314252137153/10000
  Stage: Stage-0
    Move Operator
@@ -168,7 +168,7 @@
  Stage: Stage-5
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/621237713/10007
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-22_134_7701660314252137153/10007
          Reduce Output Operator
            sort order: 
            Map-reduce partition columns:
@@ -198,7 +198,7 @@
     Move Operator
       files:
           hdfs directory: true
-          destination: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/192420524/10002
+          destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-22_134_7701660314252137153/10002
  Stage: Stage-1
    Move Operator
@@ -213,7 +213,7 @@
  Stage: Stage-8
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/621237713/10008
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-22_134_7701660314252137153/10008
          Reduce Output Operator
            sort order: 
            Map-reduce partition columns:
@@ -243,7 +243,7 @@
     Move Operator
       files:
           hdfs directory: true
-          destination: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/192420524/10004
+          destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-22_134_7701660314252137153/10004
  Stage: Stage-2
    Move Operator
@@ -261,7 +261,7 @@
  Stage: Stage-11
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/621237713/10009
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-22_134_7701660314252137153/10009
          Reduce Output Operator
            sort order: 
            Map-reduce partition columns:
@@ -289,7 +289,7 @@
     Move Operator
       files:
           hdfs directory: true
-          destination: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/192420524/10006
+          destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-22_134_7701660314252137153/10006
  Stage: Stage-3
    Move Operator
@@ -300,7 +300,7 @@
  Stage: Stage-14
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/621237713/10010
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-22_134_7701660314252137153/10010
          Reduce Output Operator
            sort order: 
            Map-reduce partition columns:
@@ -342,14 +342,24 @@
 POSTHOOK: Output: default@dest2
 POSTHOOK: Output: default@dest3@ds=2008-04-08/hr=12
 POSTHOOK: Output: ../build/ql/test/data/warehouse/dest4.out
+POSTHOOK: Lineage: dest2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/861519410/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-26_949_4521337473628037850/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/861519410/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-26_949_4521337473628037850/10000
+POSTHOOK: Lineage: dest2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 86	val_86
 27	val_27
 98	val_98
@@ -437,11 +447,16 @@
 PREHOOK: query: SELECT dest2.* FROM dest2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest2
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/724254218/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-26_998_7702626282580383434/10000
 POSTHOOK: query: SELECT dest2.* FROM dest2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest2
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/724254218/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-26_998_7702626282580383434/10000
+POSTHOOK: Lineage: dest2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 165	val_165
 193	val_193
 150	val_150
@@ -550,11 +565,16 @@
 PREHOOK: query: SELECT dest3.* FROM dest3
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest3@ds=2008-04-08/hr=12
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/615841665/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-27_042_9019937236375806538/10000
 POSTHOOK: query: SELECT dest3.* FROM dest3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest3@ds=2008-04-08/hr=12
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/615841665/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-27_042_9019937236375806538/10000
+POSTHOOK: Lineage: dest2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest3 PARTITION(ds=2008-04-08,hr=12).key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 238	2008-04-08	12
 255	2008-04-08	12
 278	2008-04-08	12
Index: ql/src/test/results/clientpositive/input36.q.out
===================================================================
--- ql/src/test/results/clientpositive/input36.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/input36.q.out	(working copy)
@@ -80,7 +80,7 @@
     Move Operator
       files:
           hdfs directory: true
-          destination: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1863527476/10000
+          destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-14_548_2744879461017544484/10000
  Stage: Stage-0
    Move Operator
@@ -95,7 +95,7 @@
  Stage: Stage-2
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/28010069/10002
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-14_548_2744879461017544484/10002
          Reduce Output Operator
            sort order: 
            Map-reduce partition columns:
@@ -139,14 +139,18 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1881926311/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-17_983_8212708796744576813/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1881926311/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-23-17_983_8212708796744576813/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 NULL	NULL
 NULL	NULL
 NULL	NULL
Index: ql/src/test/results/clientpositive/mapreduce3.q.out
===================================================================
--- ql/src/test/results/clientpositive/mapreduce3.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/mapreduce3.q.out	(working copy)
@@ -113,14 +113,22 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.one SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.ten SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/1096120278/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-09_353_3104778690352539455/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/1096120278/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-09_353_3104778690352539455/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.one SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.ten SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 0	0	0	val_0
 0	0	0	val_0
 0	0	0	val_0
Index: ql/src/test/results/clientpositive/alter3.q.out
===================================================================
--- ql/src/test/results/clientpositive/alter3.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/alter3.q.out	(working copy)
@@ -29,14 +29,16 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter3_src
 POSTHOOK: Output: default@alter3@pcol1=test_part/pcol2=test_part
+POSTHOOK: Lineage: alter3 PARTITION(pcol1=test_part,pcol2=test_part).col1 SIMPLE null[(alter3_src)alter3_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: select * from alter3 where pcol1='test_part' and pcol2='test_part'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alter3@pcol1=test_part/pcol2=test_part
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1483627653/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-01_729_6910390325308725251/10000
 POSTHOOK: query: select * from alter3 where pcol1='test_part' and pcol2='test_part'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter3@pcol1=test_part/pcol2=test_part
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1483627653/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-01_729_6910390325308725251/10000
+POSTHOOK: Lineage: alter3 PARTITION(pcol1=test_part,pcol2=test_part).col1 SIMPLE null[(alter3_src)alter3_src.FieldSchema(name:col1, type:string, comment:null), ]
 1	test_part	test_part
 2	test_part	test_part
 3	test_part	test_part
@@ -49,32 +51,36 @@
 POSTHOOK: type: ALTERTABLE_RENAME
 POSTHOOK: Input: default@alter3
 POSTHOOK: Output: default@alter3_renamed
+POSTHOOK: Lineage: alter3 PARTITION(pcol1=test_part,pcol2=test_part).col1 SIMPLE null[(alter3_src)alter3_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: describe extended alter3_renamed
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: describe extended alter3_renamed
 POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: alter3 PARTITION(pcol1=test_part,pcol2=test_part).col1 SIMPLE null[(alter3_src)alter3_src.FieldSchema(name:col1, type:string, comment:null), ]
 col1	string
 pcol1	string
 pcol2	string
-Detailed Table Information	Table(tableName:alter3_renamed, dbName:default, owner:njain, createTime:1253779705, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter3_renamed, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:pcol1, type:string, comment:null), FieldSchema(name:pcol2, type:string, comment:null)], parameters:{last_modified_by=njain,last_modified_time=1253779709})
+Detailed Table Information	Table(tableName:alter3_renamed, dbName:default, owner:athusoo, createTime:1269536278, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/alter3_renamed, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:pcol1, type:string, comment:null), FieldSchema(name:pcol2, type:string, comment:null)], parameters:{last_modified_by=athusoo,last_modified_time=1269536281,transient_lastDdlTime=1269536281}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
 PREHOOK: query: describe extended alter3_renamed partition (pCol1='test_part', pcol2='test_part')
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: describe extended alter3_renamed partition (pCol1='test_part', pcol2='test_part')
 POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: alter3 PARTITION(pcol1=test_part,pcol2=test_part).col1 SIMPLE null[(alter3_src)alter3_src.FieldSchema(name:col1, type:string, comment:null), ]
 col1	string
 pcol1	string
 pcol2	string
-Detailed Partition Information	Partition(values:[test_part, test_part], dbName:default, tableName:alter3_renamed, createTime:0, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter3_renamed/pcol1=test_part/pcol2=test_part, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{})
+Detailed Partition Information	Partition(values:[test_part, test_part], dbName:default, tableName:alter3_renamed, createTime:1269536281, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/alter3_renamed/pcol1=test_part/pcol2=test_part, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1269536281})
 PREHOOK: query: select * from alter3_renamed where pcol1='test_part' and pcol2='test_part'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alter3_renamed@pcol1=test_part/pcol2=test_part
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/250127671/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-02_179_1592306115281092982/10000
 POSTHOOK: query: select * from alter3_renamed where pcol1='test_part' and pcol2='test_part'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter3_renamed@pcol1=test_part/pcol2=test_part
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/250127671/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-02_179_1592306115281092982/10000
+POSTHOOK: Lineage: alter3 PARTITION(pcol1=test_part,pcol2=test_part).col1 SIMPLE null[(alter3_src)alter3_src.FieldSchema(name:col1, type:string, comment:null), ]
 1	test_part	test_part
 2	test_part	test_part
 3	test_part	test_part
@@ -86,12 +92,15 @@
 POSTHOOK: query: drop table alter3_src
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@alter3_src
+POSTHOOK: Lineage: alter3 PARTITION(pcol1=test_part,pcol2=test_part).col1 SIMPLE null[(alter3_src)alter3_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: drop table alter3
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table alter3
 POSTHOOK: type: DROPTABLE
+POSTHOOK: Lineage: alter3 PARTITION(pcol1=test_part,pcol2=test_part).col1 SIMPLE null[(alter3_src)alter3_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: drop table alter3_renamed
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table alter3_renamed
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@alter3_renamed
+POSTHOOK: Lineage: alter3 PARTITION(pcol1=test_part,pcol2=test_part).col1 SIMPLE null[(alter3_src)alter3_src.FieldSchema(name:col1, type:string, comment:null), ]
Index: ql/src/test/results/clientpositive/groupby4_map_skew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby4_map_skew.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/groupby4_map_skew.q.out	(working copy)
@@ -81,12 +81,14 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.key UDAF null[(src)src.null, ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/503771087/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-04_366_4392515071330554146/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/503771087/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-04_366_4392515071330554146/10000
+POSTHOOK: Lineage: dest1.key UDAF null[(src)src.null, ]
 500
Index: ql/src/test/results/clientpositive/bucket4.q.out
===================================================================
--- ql/src/test/results/clientpositive/bucket4.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/bucket4.q.out	(working copy)
@@ -52,9 +52,9 @@
                     type: string
      Needs Tagging: false
      Path -> Alias:
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src [src]
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src [src]
      Path -> Partition:
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
          Partition
            base file name: src
            input format: org.apache.hadoop.mapred.TextInputFormat
@@ -65,12 +65,12 @@
              columns.types string:string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
              name src
              serialization.ddl struct src { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1267133823
+              transient_lastDdlTime 1269536327
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            input format: org.apache.hadoop.mapred.TextInputFormat
@@ -81,12 +81,12 @@
              columns.types string:string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
              name src
              serialization.ddl struct src { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1267133823
+              transient_lastDdlTime 1269536327
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: src
            name: src
@@ -102,7 +102,7 @@
            File Output Operator
              compressed: false
              GlobalTableId: 1
-              directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-37-04_798_2890848395538612897/10000
+              directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-48_463_2463667137224649955/10000
              NumFilesPerFileSink: 2
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
@@ -115,12 +115,12 @@
                    columns.types int:string
                    file.inputformat org.apache.hadoop.mapred.TextInputFormat
                    file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucket4_1
+                    location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucket4_1
                    name bucket4_1
                    serialization.ddl struct bucket4_1 { i32 key, string value}
                    serialization.format 1
                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    transient_lastDdlTime 1267133824
+                    transient_lastDdlTime 1269536328
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                  name: bucket4_1
              TotalFiles: 2
@@ -130,7 +130,7 @@
     Move Operator
       tables:
           replace: true
-          source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-37-04_798_2890848395538612897/10000
+          source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-48_463_2463667137224649955/10000
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -142,15 +142,15 @@
                columns.types int:string
                file.inputformat org.apache.hadoop.mapred.TextInputFormat
                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucket4_1
+                location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucket4_1
                name bucket4_1
                serialization.ddl struct bucket4_1 { i32 key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1267133824
+                transient_lastDdlTime 1269536328
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: bucket4_1
-          tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-37-04_798_2890848395538612897/10001
+          tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-48_463_2463667137224649955/10001
 PREHOOK: query: insert overwrite table bucket4_1
@@ -163,12 +163,16 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@bucket4_1
+POSTHOOK: Lineage: bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: explain select * from bucket4_1 tablesample (bucket 1 out of 2) s
 PREHOOK: type: QUERY
 POSTHOOK: query: explain select * from bucket4_1 tablesample (bucket 1 out of 2) s
 POSTHOOK: type: QUERY
+POSTHOOK: Lineage: bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF bucket4_1 (TOK_TABLESAMPLE 1 2) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
@@ -213,11 +217,13 @@
 PREHOOK: query: select * from bucket4_1 tablesample (bucket 1 out of 2) s
 PREHOOK: type: QUERY
 PREHOOK: Input: default@bucket4_1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-37-09_385_8660032017511325986/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-52_051_811366469333922708/10000
 POSTHOOK: query: select * from bucket4_1 tablesample (bucket 1 out of 2) s
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucket4_1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-25_13-37-09_385_8660032017511325986/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-52_051_811366469333922708/10000
+POSTHOOK: Lineage: bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 0	val_0
 0	val_0
 0	val_0
@@ -470,3 +476,5 @@
 POSTHOOK: query: drop table bucket4_1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@bucket4_1
+POSTHOOK: Lineage: bucket4_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket4_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/groupby7_noskew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby7_noskew.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/groupby7_noskew.q.out	(working copy)
@@ -112,7 +112,7 @@
  Stage: Stage-3
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/951314625/10004
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-44_634_378927450294624248/10004
          Reduce Output Operator
            key expressions:
                  expr: key
@@ -183,14 +183,22 @@
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
 POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: SELECT DEST1.* FROM DEST1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1116174045/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-51_148_6222244937071504344/10000
 POSTHOOK: query: SELECT DEST1.* FROM DEST1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1116174045/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-51_148_6222244937071504344/10000
+POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 0	0.0
 10	10.0
 100	200.0
@@ -503,11 +511,15 @@
 PREHOOK: query: SELECT DEST2.* FROM DEST2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest2
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1736231197/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-51_198_5140163112540321708/10000
 POSTHOOK: query: SELECT DEST2.* FROM DEST2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest2
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1736231197/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-51_198_5140163112540321708/10000
+POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 0	0.0
 10	10.0
 100	200.0
Index: ql/src/test/results/clientpositive/groupby6_map.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby6_map.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/groupby6_map.q.out	(working copy)
@@ -89,14 +89,16 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/2021418754/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-49_669_7321961392705029941/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/2021418754/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-49_669_7321961392705029941/10000
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
 0
 1
 2
Index: ql/src/test/results/clientpositive/join5.q.out
===================================================================
--- ql/src/test/results/clientpositive/join5.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/join5.q.out	(working copy)
@@ -202,14 +202,22 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c4 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1233073400/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-13_395_2145974329014213142/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1233073400/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-35-13_395_2145974329014213142/10000
+POSTHOOK: Lineage: dest1.c4 SIMPLE null[(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 SIMPLE null[(src)src2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 SIMPLE null[(src)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src1.FieldSchema(name:key, type:string, comment:default), ]
 17	val_17	17	val_17
 18	val_18	18	val_18
 18	val_18	18	val_18
Index: ql/src/test/results/clientpositive/transform1.q.out
===================================================================
--- ql/src/test/results/clientpositive/transform1.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/transform1.q.out	(working copy)
@@ -54,11 +54,11 @@
 PREHOOK: query: SELECT transform(*) USING 'cat' AS (col array<int>) FROM transform1_t1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@transform1_t1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/1737132015/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-08-45_232_2934599433172896677/10000
 POSTHOOK: query: SELECT transform(*) USING 'cat' AS (col array<int>) FROM transform1_t1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@transform1_t1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/1737132015/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-08-45_232_2934599433172896677/10000
 PREHOOK: query: drop table transform1_t1
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table transform1_t1
@@ -83,12 +83,14 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@transform1_t2
+POSTHOOK: Lineage: transform1_t2.col SIMPLE null[]
 PREHOOK: query: EXPLAIN SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
 POSTHOOK: type: QUERY
+POSTHOOK: Lineage: transform1_t2.col SIMPLE null[]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF transform1_t2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST '0\0021\0022') TOK_SERDE TOK_RECORDWRITER 'cat' TOK_SERDE TOK_RECORDREADER (TOK_TABCOLLIST (TOK_TABCOL col (TOK_LIST TOK_INT))))))))
@@ -128,14 +130,16 @@
 PREHOOK: query: SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@transform1_t2
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/389439457/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-08-51_452_4290629570039348011/10000
 POSTHOOK: query: SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@transform1_t2
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/389439457/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-08-51_452_4290629570039348011/10000
+POSTHOOK: Lineage: transform1_t2.col SIMPLE null[]
 [0,1,2]
 PREHOOK: query: drop table transform1_t2
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table transform1_t2
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@transform1_t2
+POSTHOOK: Lineage: transform1_t2.col SIMPLE null[]
Index: ql/src/test/results/clientpositive/fileformat_text.q.out
===================================================================
--- ql/src/test/results/clientpositive/fileformat_text.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/fileformat_text.q.out	(working copy)
@@ -43,7 +43,7 @@
 key	int
 value	string
-Detailed Table Information	Table(tableName:dest1, dbName:default, owner:njain, createTime:1253779886, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{})
+Detailed Table Information	Table(tableName:dest1, dbName:default, owner:athusoo, createTime:1269537191, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{transient_lastDdlTime=1269537191}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
 PREHOOK: query: FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 10
 PREHOOK: type: QUERY
@@ -54,14 +54,18 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1843819480/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-15_160_838086785449219893/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1843819480/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-15_160_838086785449219893/10000
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
 0	val_0
 4	val_4
 8	val_8
@@ -77,3 +81,5 @@
 POSTHOOK: query: DROP TABLE dest1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/union18.q.out
===================================================================
--- ql/src/test/results/clientpositive/union18.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/union18.q.out	(working copy)
@@ -94,7 +94,7 @@
  Stage: Stage-3
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/741093659/10004
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-13_695_642197799374385111/10004
          Union
            Select Operator
              expressions:
@@ -128,7 +128,7 @@
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                  name: dest2
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/741093659/10007
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-13_695_642197799374385111/10007
          Union
            Select Operator
              expressions:
@@ -170,7 +170,7 @@
     Move Operator
       files:
           hdfs directory: true
-          destination: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/599903239/10000
+          destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-13_695_642197799374385111/10000
  Stage: Stage-0
    Move Operator
@@ -185,7 +185,7 @@
  Stage: Stage-4
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/741093659/10005
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-13_695_642197799374385111/10005
          Reduce Output Operator
            sort order: 
            Map-reduce partition columns:
@@ -215,7 +215,7 @@
     Move Operator
       files:
           hdfs directory: true
-          destination: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/599903239/10002
+          destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-13_695_642197799374385111/10002
  Stage: Stage-1
    Move Operator
@@ -230,7 +230,7 @@
  Stage: Stage-7
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/741093659/10006
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-13_695_642197799374385111/10006
          Reduce Output Operator
            sort order: 
            Map-reduce partition columns:
@@ -294,14 +294,24 @@
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
 POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest2.val2 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: SELECT DEST1.* FROM DEST1 SORT BY DEST1.key, DEST1.value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1342633477/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-29_316_6769297367993328194/10000
 POSTHOOK: query: SELECT DEST1.* FROM DEST1 SORT BY DEST1.key, DEST1.value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1342633477/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-29_316_6769297367993328194/10000
+POSTHOOK: Lineage: dest2.val2 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
 0	val_0
 0	val_0
 0	val_0
@@ -806,11 +816,16 @@
 PREHOOK: query: SELECT DEST2.* FROM DEST2 SORT BY DEST2.key, DEST2.val1, DEST2.val2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest2
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1450263550/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-32_324_5786995039094365124/10000
 POSTHOOK: query: SELECT DEST2.* FROM DEST2 SORT BY DEST2.key, DEST2.val1, DEST2.val2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest2
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1450263550/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-29-32_324_5786995039094365124/10000
+POSTHOOK: Lineage: dest2.val2 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
 0	val_0	val_0
 0	val_0	val_0
 0	val_0	val_0
@@ -1317,8 +1332,18 @@
 POSTHOOK: query: drop table DEST1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest2.val2 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
 PREHOOK: query: drop table DEST2
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table DEST2
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest2.val2 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.val1 SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SET null[(src)s1.null, (src)s2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SET null[(src)s2.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/input31.q.out
===================================================================
--- ql/src/test/results/clientpositive/input31.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/input31.q.out	(working copy)
@@ -106,22 +106,26 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcbucket
 POSTHOOK: Output: default@tst_dest31
+POSTHOOK: Lineage: tst_dest31.a UDAF null[(srcbucket)srcbucket.null, ]
 PREHOOK: query: select * from tst_dest31
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tst_dest31
-PREHOOK: Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/305456482/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-22-45_899_5401786468455610283/10000
 POSTHOOK: query: select * from tst_dest31
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tst_dest31
-POSTHOOK: Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/305456482/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-22-45_899_5401786468455610283/10000
+POSTHOOK: Lineage: tst_dest31.a UDAF null[(srcbucket)srcbucket.null, ]
 493
 PREHOOK: query: drop table tst_dest31
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table tst_dest31
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@tst_dest31
+POSTHOOK: Lineage: tst_dest31.a UDAF null[(srcbucket)srcbucket.null, ]
 PREHOOK: query: drop table dest31
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table dest31
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest31
+POSTHOOK: Lineage: tst_dest31.a UDAF null[(srcbucket)srcbucket.null, ]
Index: ql/src/test/results/clientpositive/udf8.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf8.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/udf8.q.out	(working copy)
@@ -11,6 +11,7 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
 PREHOOK: query: FROM src INSERT OVERWRITE TABLE dest1 SELECT '1' WHERE src.key = 86
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -19,12 +20,16 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
 PREHOOK: query: EXPLAIN SELECT avg(c1), sum(c1), count(c1) FROM dest1
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN SELECT avg(c1), sum(c1), count(c1) FROM dest1
 POSTHOOK: type: QUERY
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION avg (TOK_TABLE_OR_COL c1))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_TABLE_OR_COL c1))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL c1))))))
@@ -95,9 +100,11 @@
 PREHOOK: query: SELECT avg(c1), sum(c1), count(c1) FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1261922644/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-56_303_229606906273980620/10000
 POSTHOOK: query: SELECT avg(c1), sum(c1), count(c1) FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1261922644/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-56_303_229606906273980620/10000
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
 1.0	1.0	1
Index: ql/src/test/results/clientpositive/udf_reverse.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_reverse.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/udf_reverse.q.out	(working copy)
@@ -58,7 +58,7 @@
     Move Operator
       files:
           hdfs directory: true
-          destination: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/472117671/10000
+          destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-20-56_575_7125961792098775360/10000
  Stage: Stage-0
    Move Operator
@@ -73,7 +73,7 @@
  Stage: Stage-2
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1252154601/10002
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-20-56_575_7125961792098775360/10002
          Reduce Output Operator
            sort order: 
            Map-reduce partition columns:
@@ -103,14 +103,16 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/186785883/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-20-59_739_5829087754686864374/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/186785883/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-20-59_739_5829087754686864374/10000
+POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
 832_lav
 113_lav
@@ -141,6 +143,7 @@
 POSTHOOK: query: DROP TABLE dest1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: -- Test with non-ascii characters
 -- kv4.txt contains the text 0xE982B5E993AE, which should be reversed to
 -- 0xE993AEE982B5
@@ -152,22 +155,26 @@
 CREATE TABLE dest1(name STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE dest1
 PREHOOK: type: LOAD
 POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE dest1
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: SELECT count(1) FROM dest1 WHERE reverse(dest1.name) = _UTF-8 0xE993AEE982B5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1359492114/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-20-59_960_1471054151305905623/10000
 POSTHOOK: query: SELECT count(1) FROM dest1 WHERE reverse(dest1.name) = _UTF-8 0xE993AEE982B5
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1359492114/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-20-59_960_1471054151305905623/10000
+POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
 1
 PREHOOK: query: DROP TABLE dest1
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: DROP TABLE dest1
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.len SIMPLE null[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/union3.q.out
===================================================================
--- ql/src/test/results/clientpositive/union3.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/union3.q.out	(working copy)
@@ -88,7 +88,7 @@
  Stage: Stage-2
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/2013565127/10002
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-57_619_6858206914822259707/10002
          Union
            Select Operator
              expressions:
@@ -101,7 +101,7 @@
                table:
                    input format: org.apache.hadoop.mapred.TextInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/2013565127/10003
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-57_619_6858206914822259707/10003
          Union
            Select Operator
              expressions:
@@ -114,7 +114,7 @@
                table:
                    input format: org.apache.hadoop.mapred.TextInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/2013565127/10005
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-57_619_6858206914822259707/10005
          Union
            Select Operator
              expressions:
@@ -127,7 +127,7 @@
                table:
                    input format: org.apache.hadoop.mapred.TextInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/2013565127/10007
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-57_619_6858206914822259707/10007
          Union
            Select Operator
              expressions:
@@ -218,7 +218,7 @@
  Stage: Stage-5
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/2013565127/10004
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-57_619_6858206914822259707/10004
          Reduce Output Operator
            key expressions:
                  expr: _col0
@@ -280,7 +280,7 @@
  Stage: Stage-7
    Map Reduce
      Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/2013565127/10006
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-57_619_6858206914822259707/10006
          Reduce Output Operator
            key expressions:
                  expr: _col0
@@ -356,14 +356,16 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@union_out
+POSTHOOK: Lineage: union_out.id SET null[]
 PREHOOK: query: select * from union_out cluster by id
 PREHOOK: type: QUERY
 PREHOOK: Input: default@union_out
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/63213254/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-31-23_332_7132895334401217920/10000
 POSTHOOK: query: select * from union_out cluster by id
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@union_out
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/63213254/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-31-23_332_7132895334401217920/10000
+POSTHOOK: Lineage: union_out.id SET null[]
 1
 2
 3
Index: ql/src/test/results/clientpositive/binary_output_format.q.out
===================================================================
--- ql/src/test/results/clientpositive/binary_output_format.q.out	(revision 927279)
+++ ql/src/test/results/clientpositive/binary_output_format.q.out	(working copy)
@@ -92,7 +92,7 @@
            File Output Operator
              compressed: false
              GlobalTableId: 1
-              directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-24-50_051_1929056605300008322/10002
+              directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-05_504_1541311745880864589/10002
              NumFilesPerFileSink: 1
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
@@ -103,22 +103,22 @@
                    columns.types string
                    file.inputformat org.apache.hadoop.mapred.TextInputFormat
                    file.outputformat org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
-                    location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+                    location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
                    name dest1
                    serialization.ddl struct dest1 { string mydata}
                    serialization.format 1
                    serialization.last.column.takes.rest true
                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    transient_lastDdlTime 1266449090
+                    transient_lastDdlTime 1269536285
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                  name: dest1
              TotalFiles: 1
              MultiFileSpray: false
      Needs Tagging: false
      Path -> Alias:
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src [src]
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src [src]
      Path -> Partition:
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
          Partition
            base file name: src
            input format: org.apache.hadoop.mapred.TextInputFormat
@@ -129,12 +129,12 @@
              columns.types string:string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
              name src
              serialization.ddl struct src { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1266449089
+              transient_lastDdlTime 1269536284
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            input format: org.apache.hadoop.mapred.TextInputFormat
@@ -145,12 +145,12 @@
              columns.types string:string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src
              name src
              serialization.ddl struct src { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1266449089
+              transient_lastDdlTime 1269536284
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: src
            name: src
@@ -162,14 +162,14 @@
     Move Operator
       files:
          hdfs directory:
true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-24-50_051_1929056605300008322/10002 - destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-24-50_051_1929056605300008322/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-05_504_1541311745880864589/10002 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-05_504_1541311745880864589/10000 Stage: Stage-0 Move Operator tables: replace: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-24-50_051_1929056605300008322/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-05_504_1541311745880864589/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat @@ -179,21 +179,21 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266449090 + transient_lastDdlTime 1269536285 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-24-50_051_1929056605300008322/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-05_504_1541311745880864589/10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-24-50_051_1929056605300008322/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-05_504_1541311745880864589/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -205,9 +205,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-24-50_051_1929056605300008322/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-24-50_051_1929056605300008322/10002] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-05_504_1541311745880864589/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-05_504_1541311745880864589/10002] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-24-50_051_1929056605300008322/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-05_504_1541311745880864589/10002 Partition base file name: 10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -218,13 +218,13 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266449090 + transient_lastDdlTime 1269536285 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -235,13 +235,13 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266449090 + transient_lastDdlTime 1269536285 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 name: dest1 @@ -250,7 +250,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-24-50_051_1929056605300008322/10000 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-05_504_1541311745880864589/10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -261,13 +261,13 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266449090 + transient_lastDdlTime 1269536285 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 TotalFiles: 1 @@ -302,16 +302,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.mydata SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: -- Test the result SELECT * FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-24-54_510_6167292116608425875/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-08_954_1377255331413077086/10000 POSTHOOK: query: -- Test the result SELECT * FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-24-54_510_6167292116608425875/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-58-08_954_1377255331413077086/10000 +POSTHOOK: Lineage: dest1.mydata SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] 238 val_238 86 val_86 311 
val_311 @@ -817,3 +819,4 @@ POSTHOOK: query: DROP TABLE dest1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.mydata SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/input1_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input1_limit.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input1_limit.q.out (working copy) @@ -104,7 +104,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/njain/hive5/hive5/build/ql/tmp/1197694849/10004 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-21-12_466_3269806107543521892/10004 Reduce Output Operator sort order: tag: -1 @@ -157,14 +157,22 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 POSTHOOK: Output: default@dest2 +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1527068792/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-21-19_118_1065809602893758175/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1527068792/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-21-19_118_1065809602893758175/10000 +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 86 val_86 27 val_27 98 val_98 @@ -178,11 +186,15 @@ PREHOOK: query: SELECT dest2.* FROM dest2 PREHOOK: type: QUERY PREHOOK: Input: default@dest2 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/651013402/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-21-19_160_6304679461763378023/10000 POSTHOOK: query: SELECT dest2.* FROM dest2 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest2 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/651013402/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-21-19_160_6304679461763378023/10000 +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, 
comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 86 val_86 27 val_27 98 val_98 @@ -193,8 +205,16 @@ POSTHOOK: query: DROP TABLE dest1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: DROP TABLE dest2 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE dest2 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest2 +POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/multi_insert.q.out =================================================================== --- ql/src/test/results/clientpositive/multi_insert.q.out (revision 927279) +++ ql/src/test/results/clientpositive/multi_insert.q.out (working copy) @@ -115,14 +115,22 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@src_multi1 POSTHOOK: Output: default@src_multi2 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: select * from src_multi1 order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@src_multi1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-29-29_447_2036367580743875308/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-06_324_3032984390143335147/10000 POSTHOOK: query: select * from src_multi1 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-29-29_447_2036367580743875308/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-06_324_3032984390143335147/10000 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 val_0 0 val_0 0 val_0 @@ -136,11 +144,15 @@ PREHOOK: query: select * from src_multi2 order by key, value PREHOOK: type: QUERY 
PREHOOK: Input: default@src_multi2 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-29-35_523_32784124749201803/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-09_213_7005710957544220348/10000 POSTHOOK: query: select * from src_multi2 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi2 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-29-35_523_32784124749201803/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-09_213_7005710957544220348/10000 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 11 val_11 12 val_12 12 val_12 @@ -160,6 +172,10 @@ insert overwrite table src_multi1 select * where key < 10 insert overwrite table src_multi2 select * where key > 10 and key < 20 POSTHOOK: type: QUERY +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi1)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 10))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi2)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) 10) (< (TOK_TABLE_OR_COL key) 20))))) @@ -227,7 +243,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-29-39_460_4688171279166647380/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-12_224_5831320197752608786/10000 Stage: Stage-0 Move Operator @@ -242,7 +258,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-29-39_460_4688171279166647380/10004 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-12_224_5831320197752608786/10004 Reduce Output Operator sort order: Map-reduce partition columns: @@ -272,7 +288,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-29-39_460_4688171279166647380/10002 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-12_224_5831320197752608786/10002 Stage: Stage-1 Move Operator @@ -287,7 +303,7 @@ Stage: Stage-6 Map Reduce Alias -> Map Operator Tree: - 
file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-29-39_460_4688171279166647380/10005 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-12_224_5831320197752608786/10005 Reduce Output Operator sort order: Map-reduce partition columns: @@ -325,14 +341,30 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@src_multi1 POSTHOOK: Output: default@src_multi2 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: select * from src_multi1 order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@src_multi1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-29-44_471_7177072664819434808/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-15_997_9092442535901994406/10000 POSTHOOK: query: select * from src_multi1 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-29-44_471_7177072664819434808/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-15_997_9092442535901994406/10000 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 val_0 0 val_0 0 val_0 @@ -346,11 +378,19 @@ PREHOOK: query: select * from src_multi2 order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@src_multi2 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-29-48_776_4746714833769657150/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-18_913_7330983041278173292/10000 
POSTHOOK: query: select * from src_multi2 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi2 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-29-48_776_4746714833769657150/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-18_913_7330983041278173292/10000 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 11 val_11 12 val_12 12 val_12 @@ -370,6 +410,14 @@ insert overwrite table src_multi1 select * where key < 10 insert overwrite table src_multi2 select * where key > 10 and key < 20 POSTHOOK: type: QUERY +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi1)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 10))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi2)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) 10) (< (TOK_TABLE_OR_COL key) 20))))) @@ -459,14 +507,38 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@src_multi1 POSTHOOK: Output: default@src_multi2 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE 
null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: select * from src_multi1 order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@src_multi1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-29-57_603_1895139113884971235/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-25_230_5128935787339667236/10000 POSTHOOK: query: select * from src_multi1 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-29-57_603_1895139113884971235/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-25_230_5128935787339667236/10000 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 val_0 0 val_0 0 val_0 @@ -480,11 +552,23 @@ PREHOOK: query: select * from src_multi2 order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@src_multi2 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-01_874_6248094955310923547/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-28_288_3209102960310805735/10000 POSTHOOK: query: select * from src_multi2 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi2 -POSTHOOK: Output: 
file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-01_874_6248094955310923547/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-28_288_3209102960310805735/10000 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 11 val_11 12 val_12 12 val_12 @@ -504,6 +588,18 @@ insert overwrite table src_multi1 select * where key < 10 insert overwrite table src_multi2 select * where key > 10 and key < 20 POSTHOOK: type: QUERY +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi1)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 10))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi2)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) 10) (< (TOK_TABLE_OR_COL key) 
20))))) @@ -571,7 +667,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-07_533_8338685977513653800/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-31_296_4859291832644598138/10000 Stage: Stage-0 Move Operator @@ -586,7 +682,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-07_533_8338685977513653800/10004 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-31_296_4859291832644598138/10004 Reduce Output Operator sort order: Map-reduce partition columns: @@ -616,7 +712,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-07_533_8338685977513653800/10002 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-31_296_4859291832644598138/10002 Stage: Stage-1 Move Operator @@ -631,7 +727,7 @@ Stage: Stage-6 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-07_533_8338685977513653800/10005 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-31_296_4859291832644598138/10005 Reduce Output Operator sort order: Map-reduce partition columns: @@ -669,14 +765,46 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@src_multi1 POSTHOOK: Output: default@src_multi2 +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select * from src_multi1 order by 
key, value PREHOOK: type: QUERY PREHOOK: Input: default@src_multi1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-11_925_7317892190390720566/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-35_067_4984967040908933919/10000 POSTHOOK: query: select * from src_multi1 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-11_925_7317892190390720566/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-35_067_4984967040908933919/10000 +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 0 val_0 0 val_0 @@ -690,11 +818,27 @@ PREHOOK: query: select * from src_multi2 order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@src_multi2 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-15_939_7123575127159240513/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-37_974_2427903918851113567/10000 POSTHOOK: query: select * from src_multi2 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi2 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-15_939_7123575127159240513/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-37_974_2427903918851113567/10000 +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, 
comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 11 val_11 12 val_12 12 val_12 @@ -714,6 +858,22 @@ insert overwrite table src_multi1 select * where key < 10 group by key, value insert overwrite table src_multi2 select * where key > 10 and key < 20 group by key, value POSTHOOK: type: QUERY +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: 
Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi1)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 10)) (TOK_GROUPBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi2)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) 10) (< (TOK_TABLE_OR_COL key) 20))) (TOK_GROUPBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))) @@ -828,7 +988,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-21_064_1493440255317625555/10004 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-40_925_3865624659304518433/10004 Reduce Output Operator key expressions: expr: _col0 @@ -893,14 +1053,54 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@src_multi1 POSTHOOK: Output: default@src_multi2 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select * from src_multi1 order by key, value PREHOOK: 
type: QUERY PREHOOK: Input: default@src_multi1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-31_604_6854725471079318857/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-47_631_3323967252608342012/10000 POSTHOOK: query: select * from src_multi1 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-31_604_6854725471079318857/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-47_631_3323967252608342012/10000 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 2 val_2 4 val_4 @@ -910,11 +1110,31 @@ PREHOOK: query: select * from src_multi2 order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@src_multi2 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-35_764_8471657074667538067/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-50_544_5414421862938725198/10000 POSTHOOK: query: select * from src_multi2 order by key, value POSTHOOK: 
type: QUERY POSTHOOK: Input: default@src_multi2 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-35_764_8471657074667538067/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-50_544_5414421862938725198/10000 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 11 val_11 12 val_12 15 val_15 @@ -931,6 +1151,26 @@ insert overwrite table src_multi1 select * where key < 10 group by key, value insert overwrite table src_multi2 select * where key > 10 and key < 20 group by key, value POSTHOOK: type: QUERY +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: 
src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi1)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 10)) (TOK_GROUPBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi2)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) 10) (< (TOK_TABLE_OR_COL key) 20))) (TOK_GROUPBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))) @@ -1045,7 +1285,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-40_143_8909646046511921825/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-53_447_8154598049010272691/10000 Stage: Stage-0 Move Operator @@ -1060,7 +1300,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-40_143_8909646046511921825/10004 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-53_447_8154598049010272691/10004 Reduce Output Operator sort order: Map-reduce partition columns: @@ -1086,7 +1326,7 @@ Stage: Stage-6 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-40_143_8909646046511921825/10005 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-53_447_8154598049010272691/10005 Reduce Output Operator key expressions: expr: _col0 @@ -1133,7 +1373,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-40_143_8909646046511921825/10002 + destination: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-53_447_8154598049010272691/10002 Stage: Stage-1 Move Operator @@ -1148,7 +1388,7 @@ Stage: Stage-7 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-40_143_8909646046511921825/10006 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-40-53_447_8154598049010272691/10006 Reduce Output Operator sort order: Map-reduce partition columns: @@ -1186,14 +1426,62 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@src_multi1 POSTHOOK: Output: default@src_multi2 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: select * from src_multi1 order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@src_multi1 -PREHOOK: Output: 
file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-50_895_7913352536521390096/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-00_562_7368058132052636120/10000 POSTHOOK: query: select * from src_multi1 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-50_895_7913352536521390096/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-00_562_7368058132052636120/10000 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 val_0 2 val_2 4 val_4 @@ -1203,11 +1491,35 @@ PREHOOK: query: select * from src_multi2 order by key, value PREHOOK: type: QUERY PREHOOK: Input: 
default@src_multi2 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-56_590_2617976148457105571/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-03_520_638034806860825717/10000 POSTHOOK: query: select * from src_multi2 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi2 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-30-56_590_2617976148457105571/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-03_520_638034806860825717/10000 +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 11 val_11 12 val_12 15 val_15 @@ -1224,6 +1536,30 @@ insert overwrite table src_multi1 select * where key < 10 
group by key, value insert overwrite table src_multi2 select * where key > 10 and key < 20 group by key, value POSTHOOK: type: QUERY +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi1)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 10)) (TOK_GROUPBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi2)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) 10) (< (TOK_TABLE_OR_COL key) 20))) (TOK_GROUPBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))) @@ -1338,7 +1674,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-01_349_1128452962511025567/10004 + 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-06_417_6107762879789483604/10004 Reduce Output Operator key expressions: expr: _col0 @@ -1403,14 +1739,70 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@src_multi1 POSTHOOK: Output: default@src_multi2 +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select * from src_multi1 order by key, value PREHOOK: type: 
QUERY PREHOOK: Input: default@src_multi1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-10_709_7113379560787468211/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-13_151_1215173382009660602/10000 POSTHOOK: query: select * from src_multi1 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-10_709_7113379560787468211/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-13_151_1215173382009660602/10000 +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, 
type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 2 val_2 4 val_4 @@ -1420,11 +1812,39 @@ PREHOOK: query: select * from src_multi2 order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@src_multi2 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-15_222_6577306322581832665/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-16_157_8336417296433313862/10000 POSTHOOK: query: select * from src_multi2 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi2 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-15_222_6577306322581832665/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-16_157_8336417296433313862/10000 +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE 
null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 11 val_11 12 val_12 15 val_15 @@ -1441,6 +1861,34 @@ insert overwrite table src_multi1 select * where key < 10 group by key, value insert overwrite table src_multi2 select * where key > 10 and key < 20 group by key, value POSTHOOK: type: QUERY +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE 
null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi1)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 10)) (TOK_GROUPBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi2)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) 10) (< (TOK_TABLE_OR_COL key) 20))) (TOK_GROUPBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))) @@ -1555,7 +2003,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-19_213_675301272582900632/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-19_163_1303761818889951478/10000 Stage: Stage-0 Move Operator @@ -1570,7 +2018,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-19_213_675301272582900632/10004 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-19_163_1303761818889951478/10004 Reduce Output Operator sort order: Map-reduce partition columns: @@ -1596,7 +2044,7 @@ Stage: Stage-6 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-19_213_675301272582900632/10005 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-19_163_1303761818889951478/10005 Reduce Output Operator key expressions: expr: _col0 @@ -1643,7 +2091,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-19_213_675301272582900632/10002 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-19_163_1303761818889951478/10002 Stage: Stage-1 Move Operator @@ -1658,7 +2106,7 @@ Stage: Stage-7 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-19_213_675301272582900632/10006 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-19_163_1303761818889951478/10006 Reduce Output Operator sort order: Map-reduce partition columns: @@ -1696,14 +2144,78 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@src_multi1 POSTHOOK: Output: default@src_multi2 +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select * from src_multi1 order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@src_multi1 -PREHOOK: Output: 
file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-29_456_7869097210136503483/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-26_385_3053209301607671929/10000 POSTHOOK: query: select * from src_multi1 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-29_456_7869097210136503483/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-26_385_3053209301607671929/10000 +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: 
src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 2 val_2 4 val_4 @@ -1713,11 +2225,43 @@ PREHOOK: query: select * from src_multi2 order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@src_multi2 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-33_739_9167469421754848388/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-29_417_5156136751020998081/10000 POSTHOOK: query: select * from src_multi2 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi2 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-33_739_9167469421754848388/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-29_417_5156136751020998081/10000 +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, 
comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 11 val_11 12 val_12 15 val_15 @@ -1734,6 +2278,38 @@ insert overwrite table src_multi1 select * where key < 10 insert overwrite table src_multi2 select * where key > 10 and key < 20 POSTHOOK: type: QUERY +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE 
null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi1)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 10))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi2)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) 10) (< (TOK_TABLE_OR_COL key) 20))))) @@ -1880,14 +2456,86 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@src_multi1 POSTHOOK: Output: default@src_multi2 +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), 
]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: select * from src_multi1 order by key, value
PREHOOK: type: QUERY
PREHOOK: Input: default@src_multi1
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-43_798_871047497263397499/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-36_041_3341936623191073240/10000
POSTHOOK: query: select * from src_multi1 order by key, value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_multi1
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-43_798_871047497263397499/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-36_041_3341936623191073240/10000
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
0 val_0
0 val_0
0 val_0
@@ -1911,11 +2559,47 @@
PREHOOK: query: select * from src_multi2 order by key, value
PREHOOK: type: QUERY
PREHOOK: Input: default@src_multi2
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-47_915_3175955014497083648/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-39_038_4211891629374459016/10000
POSTHOOK: query: select * from src_multi2 order by key, value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_multi2
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-47_915_3175955014497083648/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-39_038_4211891629374459016/10000
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
11 val_11
11 val_11
12 val_12
@@ -1944,6 +2628,42 @@
insert overwrite table src_multi1 select * where key < 10
insert overwrite table src_multi2 select * where key > 10 and key < 20
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi1)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 10))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi2)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) 10) (< (TOK_TABLE_OR_COL key) 20)))))
@@ -2068,7 +2788,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-52_263_6047051261705597781/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-42_036_8404844913314885773/10000
Stage: Stage-0
Move Operator
@@ -2083,7 +2803,7 @@
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-52_263_6047051261705597781/10004
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-42_036_8404844913314885773/10004
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -2113,7 +2833,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-52_263_6047051261705597781/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-42_036_8404844913314885773/10002
Stage: Stage-1
Move Operator
@@ -2128,7 +2848,7 @@
Stage: Stage-6
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-52_263_6047051261705597781/10005
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-42_036_8404844913314885773/10005
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -2166,14 +2886,94 @@
POSTHOOK: Input: default@src
POSTHOOK: Output: default@src_multi1
POSTHOOK: Output: default@src_multi2
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: select * from src_multi1 order by key, value
PREHOOK: type: QUERY
PREHOOK: Input: default@src_multi1
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-57_008_8643336423808686678/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-46_025_2844394815545304142/10000
POSTHOOK: query: select * from src_multi1 order by key, value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_multi1
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-31-57_008_8643336423808686678/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-46_025_2844394815545304142/10000
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
0 val_0
0 val_0
0 val_0
@@ -2197,11 +2997,51 @@
PREHOOK: query: select * from src_multi2 order by key, value
PREHOOK: type: QUERY
PREHOOK: Input: default@src_multi2
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-32-00_850_1477714547871107054/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-49_044_4345977347123139490/10000
POSTHOOK: query: select * from src_multi2 order by key, value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_multi2
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-32-00_850_1477714547871107054/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-49_044_4345977347123139490/10000
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
11 val_11
11 val_11
12 val_12
@@ -2230,6 +3070,46 @@
insert overwrite table src_multi1 select * where key < 10
insert overwrite table src_multi2 select * where key > 10 and key < 20
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi1)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 10))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi2)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) 10) (< (TOK_TABLE_OR_COL key) 20)))))
@@ -2376,14 +3256,102 @@
POSTHOOK: Input: default@src
POSTHOOK: Output: default@src_multi1
POSTHOOK: Output: default@src_multi2
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: select * from src_multi1 order by key, value
PREHOOK: type: QUERY
PREHOOK: Input: default@src_multi1
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-32-10_489_5856917420683506649/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-55_652_6360416452575555132/10000
POSTHOOK: query: select * from src_multi1 order by key, value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_multi1
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-32-10_489_5856917420683506649/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-55_652_6360416452575555132/10000
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
0 val_0
0 val_0
0 val_0
@@ -2407,11 +3375,55 @@
PREHOOK: query: select * from src_multi2 order by key, value
PREHOOK: type: QUERY
PREHOOK: Input: default@src_multi2
-PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-32-14_242_705177908534153271/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-58_560_9038729313316191615/10000
POSTHOOK: query: select * from src_multi2 order by key, value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_multi2
-POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-32-14_242_705177908534153271/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-41-58_560_9038729313316191615/10000
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
11 val_11
11 val_11
12 val_12
@@ -2440,6 +3452,50 @@
insert overwrite table src_multi1 select * where key < 10
insert overwrite table src_multi2 select * where key > 10 and key < 20
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi1)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 10))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB src_multi2)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) 10) (< (TOK_TABLE_OR_COL key) 20)))))
@@ -2564,7 +3620,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-32-19_769_955320895324958853/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-42-01_539_3690320369415625900/10000
Stage: Stage-0
Move Operator
@@ -2579,7 +3635,7 @@
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-32-19_769_955320895324958853/10004
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-42-01_539_3690320369415625900/10004
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -2609,7 +3665,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-32-19_769_955320895324958853/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-42-01_539_3690320369415625900/10002
Stage: Stage-1
Move Operator
@@ -2624,7 +3680,7 @@
Stage: Stage-6
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-32-19_769_955320895324958853/10005
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-42-01_539_3690320369415625900/10005
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -2662,14 +3718,110 @@
POSTHOOK: Input: default@src
POSTHOOK: Output: default@src_multi1
POSTHOOK: Output: default@src_multi2
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET
null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select * from src_multi1 order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@src_multi1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-32-24_858_7815198886293357837/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-42-05_611_3257635371822647837/10000 POSTHOOK: query: select * from src_multi1 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-32-24_858_7815198886293357837/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-42-05_611_3257635371822647837/10000 +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, 
type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, 
comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 0 val_0 0 val_0 @@ -2693,11 +3845,59 @@ PREHOOK: query: select * from src_multi2 order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@src_multi2 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-32-29_369_2079249453992062976/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-42-08_568_478250672033282063/10000 POSTHOOK: query: select * from src_multi2 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@src_multi2 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-32-29_369_2079249453992062976/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-42-08_568_478250672033282063/10000 +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, 
type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE 
null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 11 val_11 11 val_11 12 val_12 @@ -2728,6 +3928,54 @@ insert overwrite local directory '/tmp/hive_test/multiins_local/2' select * where key = 2 insert overwrite local directory '/tmp/hive_test/multiins_local/4' select * where key = 4 POSTHOOK: type: QUERY +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value 
SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET 
null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_LOCAL_DIR '/tmp/hive_test/multiins_local/0')) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 0))) (TOK_INSERT (TOK_DESTINATION (TOK_LOCAL_DIR '/tmp/hive_test/multiins_local/2')) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 2))) (TOK_INSERT (TOK_DESTINATION (TOK_LOCAL_DIR '/tmp/hive_test/multiins_local/4')) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 4)))) @@ -2833,6 +4081,54 @@ POSTHOOK: Output: /tmp/hive_test/multiins_local/0 POSTHOOK: Output: /tmp/hive_test/multiins_local/2 POSTHOOK: Output: /tmp/hive_test/multiins_local/4 +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: 
src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value 
SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 2 4 @@ -2848,6 +4144,54 @@ insert overwrite local directory '/tmp/hive_test/multiins_local/2' select * where key = 2 insert overwrite local directory '/tmp/hive_test/multiins_local/4' select * where key = 4 POSTHOOK: type: QUERY +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), 
(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] 
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_LOCAL_DIR '/tmp/hive_test/multiins_local/0')) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 0))) (TOK_INSERT (TOK_DESTINATION (TOK_LOCAL_DIR '/tmp/hive_test/multiins_local/2')) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 2))) (TOK_INSERT (TOK_DESTINATION (TOK_LOCAL_DIR '/tmp/hive_test/multiins_local/4')) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 4)))) @@ -2953,6 +4297,54 @@ POSTHOOK: Output: /tmp/hive_test/multiins_local/0 POSTHOOK: Output: /tmp/hive_test/multiins_local/2 POSTHOOK: Output: /tmp/hive_test/multiins_local/4 +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, 
comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, 
comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 2 4 @@ -2968,6 +4360,54 @@ insert overwrite local directory '/tmp/hive_test/multiins_local/2' select * where key = 2 insert overwrite local directory '/tmp/hive_test/multiins_local/4' select * where key = 4 POSTHOOK: type: QUERY +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE 
null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE 
null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_LOCAL_DIR '/tmp/hive_test/multiins_local/0')) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 0))) (TOK_INSERT (TOK_DESTINATION (TOK_LOCAL_DIR '/tmp/hive_test/multiins_local/2')) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 2))) (TOK_INSERT (TOK_DESTINATION (TOK_LOCAL_DIR '/tmp/hive_test/multiins_local/4')) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 4)))) @@ -3073,6 +4513,54 @@ POSTHOOK: Output: /tmp/hive_test/multiins_local/0 POSTHOOK: Output: /tmp/hive_test/multiins_local/2 POSTHOOK: Output: /tmp/hive_test/multiins_local/4 +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: 
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
0
2
4
@@ -3088,6 +4576,54 @@
insert overwrite local directory '/tmp/hive_test/multiins_local/2' select * where key = 2
insert overwrite local directory '/tmp/hive_test/multiins_local/4' select * where key = 4
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_LOCAL_DIR '/tmp/hive_test/multiins_local/0')) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 0))) (TOK_INSERT (TOK_DESTINATION (TOK_LOCAL_DIR '/tmp/hive_test/multiins_local/2')) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 2))) (TOK_INSERT (TOK_DESTINATION (TOK_LOCAL_DIR '/tmp/hive_test/multiins_local/4')) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 4))))
@@ -3193,6 +4729,54 @@
POSTHOOK: Output: /tmp/hive_test/multiins_local/0
POSTHOOK: Output: /tmp/hive_test/multiins_local/2
POSTHOOK: Output: /tmp/hive_test/multiins_local/4
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SET null[(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SET null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_multi2.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
0
2
4
Index: ql/src/test/results/clientpositive/groupby8.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby8.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby8.q.out (working copy)
@@ -83,7 +83,7 @@
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/596894121/10004
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-53_587_4509852210806641206/10004
Reduce Output Operator
key expressions:
expr: _col0
@@ -142,7 +142,7 @@
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/596894121/10005
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-17-53_587_4509852210806641206/10005
Reduce Output Operator
key expressions:
expr: _col0
@@ -213,14 +213,22 @@
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT DEST1.* FROM DEST1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1063984630/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-03_160_9033746331633284182/10000
POSTHOOK: query: SELECT DEST1.* FROM DEST1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1063984630/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-03_160_9033746331633284182/10000
+POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 1
10 1
100 1
@@ -533,11 +541,15 @@
PREHOOK: query: SELECT DEST2.* FROM DEST2
PREHOOK: type: QUERY
PREHOOK: Input: default@dest2
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1080526993/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-03_211_8227599068679993241/10000
POSTHOOK: query: SELECT DEST2.* FROM DEST2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest2
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1080526993/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-18-03_211_8227599068679993241/10000
+POSTHOOK: Lineage: dest2.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 1
10 1
100 1
Index: ql/src/test/results/clientpositive/case_sensitivity.q.out
===================================================================
--- ql/src/test/results/clientpositive/case_sensitivity.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/case_sensitivity.q.out (working copy)
@@ -59,7 +59,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1283457982/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-49_523_3779973660301224636/10000
Stage: Stage-0
Move Operator
@@ -74,7 +74,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/34390708/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-49_523_3779973660301224636/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -108,14 +108,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_thrift
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lintstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
PREHOOK: query: SELECT DEST1.* FROM Dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/2134218659/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-52_954_6192633791496744283/10000
POSTHOOK: query: SELECT DEST1.* FROM Dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/2134218659/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-07-52_954_6192633791496744283/10000
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lintstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ]
2 1
4 8
6 27
Index: ql/src/test/results/clientpositive/scriptfile1.q.out
===================================================================
--- ql/src/test/results/clientpositive/scriptfile1.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/scriptfile1.q.out (working copy)
@@ -23,14 +23,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/183755594/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-53-53_763_878110632388086693/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/183755594/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-53-53_763_878110632388086693/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
10 val_10
100 val_100
100 val_100
Index: ql/src/test/results/clientpositive/groupby2_map_skew.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby2_map_skew.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby2_map_skew.q.out (working copy)
@@ -84,7 +84,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/715055378/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-14-59_604_6664655294860261108/10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -158,14 +158,20 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1863077019/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-06_107_6782261372481818348/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1863077019/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-06_107_6782261372481818348/10000
+POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 1 00.0
1 71 116414.0
2 69 225571.0
Index: ql/src/test/results/clientpositive/groupby3_map.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby3_map.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/groupby3_map.q.out (working copy)
@@ -195,17 +195,44 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c9 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c8 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c7 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c6 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c5 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c4 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/913967104/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-27_930_4959100506272997278/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/913967104/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-27_930_4959100506272997278/10000
+POSTHOOK: Lineage: dest1.c9 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c8 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c7 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c6 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c5 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c4 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
130091.0 260.182 256.10355987055016 98.0 0.0 142.9268095075238 143.06995106518906 20428.072876 20469.01089779559
PREHOOK: query: DROP TABLE dest1
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE dest1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c9 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c8 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c7 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c6 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c5 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c4 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/sample7.q.out
===================================================================
--- ql/src/test/results/clientpositive/sample7.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/sample7.q.out (working copy)
@@ -57,7 +57,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-10_514_4005967619059829445/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-52_245_1438471704033374374/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -68,21 +68,21 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452830
+ transient_lastDdlTime 1269539572
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
TotalFiles: 1
MultiFileSpray: false
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt
Partition
base file name: srcbucket0.txt
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -94,12 +94,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket
name srcbucket
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452828
+ transient_lastDdlTime 1269539571
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -111,12 +111,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket
name srcbucket
serialization.ddl struct srcbucket { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452828
+ transient_lastDdlTime 1269539571
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket
name: srcbucket
@@ -128,14 +128,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-10_514_4005967619059829445/10002
- destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-10_514_4005967619059829445/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-52_245_1438471704033374374/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-52_245_1438471704033374374/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-10_514_4005967619059829445/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-52_245_1438471704033374374/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -145,20 +145,20 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452830
+ transient_lastDdlTime 1269539572
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
- tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-10_514_4005967619059829445/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-52_245_1438471704033374374/10001
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-10_514_4005967619059829445/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-52_245_1438471704033374374/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -172,9 +172,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-10_514_4005967619059829445/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-10_514_4005967619059829445/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-52_245_1438471704033374374/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-52_245_1438471704033374374/10002]
Path -> Partition:
- file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-10_514_4005967619059829445/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-52_245_1438471704033374374/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -185,12 +185,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452830
+ transient_lastDdlTime 1269539572
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -201,12 +201,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452830
+ transient_lastDdlTime 1269539572
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
name: dest1
@@ -215,7 +215,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-10_514_4005967619059829445/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-52_245_1438471704033374374/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -226,12 +226,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
name dest1
serialization.ddl struct dest1 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1266452830
+ transient_lastDdlTime 1269539572
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
TotalFiles: 1
@@ -250,14 +250,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcbucket
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-14_676_891090271034304445/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-55_803_4344430009835584713/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-27-14_676_891090271034304445/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-52-55_803_4344430009835584713/10000
+POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ]
468 val_469
272 val_273
448 val_449
Index: ql/src/test/results/clientpositive/join28.q.out
===================================================================
--- ql/src/test/results/clientpositive/join28.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/join28.q.out (working copy)
@@ -222,7 +222,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-15_16-22-31_248_3138742764068185194/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-35_486_6889909866480414128/10000
Stage: Stage-0
Move Operator
@@ -237,7 +237,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-15_16-22-31_248_3138742764068185194/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-35_486_6889909866480414128/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -283,14 +283,18 @@
POSTHOOK: Input: default@src
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SET null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: select * from dest_j1 x order by x.key
PREHOOK: type: QUERY
PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-15_16-22-38_314_5886463496780818123/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-39_991_2147807964508756286/10000
POSTHOOK: query: select * from dest_j1 x order by x.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-15_16-22-38_314_5886463496780818123/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-31-39_991_2147807964508756286/10000
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SET null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
128 val_128
128 val_128
128 val_128
@@ -403,3 +407,5 @@
POSTHOOK: query: drop table dest_j1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: dest_j1.key SET null[(src1)x.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/rcfile_union.q.out
===================================================================
--- ql/src/test/results/clientpositive/rcfile_union.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/rcfile_union.q.out (working copy)
@@ -23,20 +23,24 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@rcfile_uniontable
+POSTHOOK: Lineage: rcfile_uniontable.c SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_uniontable.b SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT * FROM ( SELECT b AS cola FROM rcfile_unionTable UNION ALL SELECT c AS cola FROM rcfile_unionTable) s
PREHOOK: type: QUERY
PREHOOK: Input: default@rcfile_uniontable
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1325887249/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-44_984_8522060140985425146/10000
POSTHOOK: query: SELECT * FROM ( SELECT b AS cola FROM rcfile_unionTable UNION ALL SELECT c AS cola FROM rcfile_unionTable) s
POSTHOOK: type: QUERY
POSTHOOK: Input: default@rcfile_uniontable
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1325887249/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-50-44_984_8522060140985425146/10000
+POSTHOOK: Lineage: rcfile_uniontable.c SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_uniontable.b SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
val_238
238
val_86
@@ -62,3 +66,5 @@
POSTHOOK: query: DROP TABLE rcfile_unionTable
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@rcfile_uniontable
+POSTHOOK: Lineage: rcfile_uniontable.c SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: rcfile_uniontable.b SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/fileformat_sequencefile.q.out
===================================================================
--- ql/src/test/results/clientpositive/fileformat_sequencefile.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/fileformat_sequencefile.q.out (working copy)
@@ -43,7 +43,7 @@
key int
value string
-Detailed Table Information Table(tableName:dest1, dbName:default, owner:njain, createTime:1253779880, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest1, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{})
+Detailed Table Information Table(tableName:dest1, dbName:default, owner:athusoo, createTime:1269537186, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{transient_lastDdlTime=1269537186}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: FROM src
INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 10
PREHOOK: type: QUERY
@@ -54,14 +54,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/862471397/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-09_471_8080029814035802688/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/862471397/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-13-09_471_8080029814035802688/10000
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
0 val_0
4 val_4
8 val_8
@@ -77,3 +81,5 @@
POSTHOOK: query: DROP TABLE dest1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/create_escape.q.out
===================================================================
--- ql/src/test/results/clientpositive/create_escape.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/create_escape.q.out (working copy)
@@ -32,7 +32,7 @@
a string
b string
-Detailed Table Information Table(tableName:table1, dbName:default, owner:njain, createTime:1253779812, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=9,escape.delim=\,field.delim= }), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{})
+Detailed Table Information Table(tableName:table1, dbName:default, owner:athusoo, createTime:1269536972, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format= ,escape.delim=\,field.delim=
PREHOOK: query: INSERT OVERWRITE TABLE table1 SELECT key, '\\\t\\' FROM src WHERE key = 86
PREHOOK: type: QUERY
PREHOOK: Input: default@src
@@ -41,17 +41,23 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@table1
+POSTHOOK: Lineage: table1.b SIMPLE null[]
+POSTHOOK: Lineage: table1.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT * FROM table1
PREHOOK: type: QUERY
PREHOOK: Input: default@table1
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1244934892/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-35_402_5823019231072432343/10000
POSTHOOK: query: SELECT * FROM table1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@table1
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1244934892/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-09-35_402_5823019231072432343/10000
+POSTHOOK: Lineage: table1.b SIMPLE null[]
+POSTHOOK: Lineage: table1.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
86 \ \
PREHOOK: query: DROP TABLE table1
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE table1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@table1
+POSTHOOK: Lineage: table1.b SIMPLE null[]
+POSTHOOK: Lineage: table1.a SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/input5.q.out
===================================================================
--- ql/src/test/results/clientpositive/input5.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/input5.q.out (working copy)
@@ -110,14 +110,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_thrift
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array, comment:from deserializer), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/1949552332/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-05_633_2552077931504702376/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/1949552332/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-25-05_633_2552077931504702376/10000
+POSTHOOK: Lineage: dest1.value SCRIPT null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array, comment:from deserializer), ]
+POSTHOOK: Lineage: dest1.key SCRIPT null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array, comment:from deserializer), ]
[0,0,0] [{"myint":0,"mystring":"0","underscore_int":0}]
[1,2,3] [{"myint":1,"mystring":"1","underscore_int":1}]
[2,4,6] [{"myint":4,"mystring":"8","underscore_int":2}]
Index: ql/src/test/results/clientpositive/input_testsequencefile.q.out
===================================================================
--- ql/src/test/results/clientpositive/input_testsequencefile.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/input_testsequencefile.q.out (working copy)
@@ -58,7 +58,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/215339388/10000
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-27-25_785_5401216071126108274/10000
Stage: Stage-0
Move Operator
@@ -73,7 +73,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1222374643/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-27-25_785_5401216071126108274/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -107,14 +107,18 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest4_sequencefile
+POSTHOOK: Lineage: dest4_sequencefile.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest4_sequencefile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest4_sequencefile.* FROM dest4_sequencefile
PREHOOK: type: QUERY
PREHOOK: Input: default@dest4_sequencefile
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1578298324/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-27-29_070_5958022656043538384/10000
POSTHOOK: query: SELECT dest4_sequencefile.* FROM dest4_sequencefile
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest4_sequencefile
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1578298324/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-27-29_070_5958022656043538384/10000
+POSTHOOK: Lineage: dest4_sequencefile.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest4_sequencefile.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
238 val_238
86 val_86
311 val_311
Index: ql/src/test/results/clientpositive/udf_get_json_object.q.out
===================================================================
--- ql/src/test/results/clientpositive/udf_get_json_object.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/udf_get_json_object.q.out (working copy)
@@ -36,12 +36,14 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c1 SIMPLE null[]
PREHOOK: query: EXPLAIN
SELECT get_json_object(src_json.json, '$.owner') FROM src_json
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT get_json_object(src_json.json, '$.owner') FROM src_json
POSTHOOK: type: QUERY
type: QUERY +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF src_json)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION get_json_object (. (TOK_TABLE_OR_COL src_json) json) '$.owner'))))) @@ -76,72 +78,80 @@ PREHOOK: query: SELECT get_json_object(src_json.json, '$') FROM src_json PREHOOK: type: QUERY PREHOOK: Input: default@src_json -PREHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/1615998931/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-02_319_7151168440846403904/10000 POSTHOOK: query: SELECT get_json_object(src_json.json, '$') FROM src_json POSTHOOK: type: QUERY POSTHOOK: Input: default@src_json -POSTHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/1615998931/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-02_319_7151168440846403904/10000 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] {"store":{"fruit":[{"weight":8,"type":"apple"},{"weight":9,"type":"pear"}],"book":[{"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95},{"author":"Herman Melville","category":"fiction","title":"Moby Dick","price":8.99,"isbn":"0-553-21311-3"},{"author":"J. R. R. Tolkien","category":"fiction","title":"The Lord of the Rings","price":22.99,"reader":[{"name":"bob","age":25},{"name":"jack","age":26}],"isbn":"0-395-19395-8"}],"basket":[[1,2,{"b":"y","a":"x"}],[3,4],[5,6]],"bicycle":{"price":19.95,"color":"red"}},"email":"amy@only_for_json_udf_test.net","owner":"amy"} PREHOOK: query: SELECT get_json_object(src_json.json, '$.owner'), get_json_object(src_json.json, '$.store') FROM src_json PREHOOK: type: QUERY PREHOOK: Input: default@src_json -PREHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/543860321/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-05_110_8058798712636834755/10000 POSTHOOK: query: SELECT get_json_object(src_json.json, '$.owner'), get_json_object(src_json.json, '$.store') FROM src_json POSTHOOK: type: QUERY POSTHOOK: Input: default@src_json -POSTHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/543860321/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-05_110_8058798712636834755/10000 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] amy {"fruit":[{"weight":8,"type":"apple"},{"weight":9,"type":"pear"}],"book":[{"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95},{"author":"Herman Melville","category":"fiction","title":"Moby Dick","price":8.99,"isbn":"0-553-21311-3"},{"author":"J. R. R. 
Tolkien","category":"fiction","title":"The Lord of the Rings","price":22.99,"reader":[{"name":"bob","age":25},{"name":"jack","age":26}],"isbn":"0-395-19395-8"}],"basket":[[1,2,{"b":"y","a":"x"}],[3,4],[5,6]],"bicycle":{"price":19.95,"color":"red"}} PREHOOK: query: SELECT get_json_object(src_json.json, '$.store.bicycle'), get_json_object(src_json.json, '$.store.book') FROM src_json PREHOOK: type: QUERY PREHOOK: Input: default@src_json -PREHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/1327255777/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-07_913_765933556871030158/10000 POSTHOOK: query: SELECT get_json_object(src_json.json, '$.store.bicycle'), get_json_object(src_json.json, '$.store.book') FROM src_json POSTHOOK: type: QUERY POSTHOOK: Input: default@src_json -POSTHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/1327255777/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-07_913_765933556871030158/10000 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] {"price":19.95,"color":"red"} [{"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95},{"author":"Herman Melville","category":"fiction","title":"Moby Dick","price":8.99,"isbn":"0-553-21311-3"},{"author":"J. R. R. Tolkien","category":"fiction","title":"The Lord of the Rings","price":22.99,"reader":[{"name":"bob","age":25},{"name":"jack","age":26}],"isbn":"0-395-19395-8"}] PREHOOK: query: SELECT get_json_object(src_json.json, '$.store.book[0]'), get_json_object(src_json.json, '$.store.book[*]') FROM src_json PREHOOK: type: QUERY PREHOOK: Input: default@src_json -PREHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/563232699/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-10_780_771812824049865181/10000 POSTHOOK: query: SELECT get_json_object(src_json.json, '$.store.book[0]'), get_json_object(src_json.json, '$.store.book[*]') FROM src_json POSTHOOK: type: QUERY POSTHOOK: Input: default@src_json -POSTHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/563232699/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-10_780_771812824049865181/10000 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] {"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95} [{"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95},{"author":"Herman Melville","category":"fiction","title":"Moby Dick","price":8.99,"isbn":"0-553-21311-3"},{"author":"J. R. R. 
Tolkien","category":"fiction","title":"The Lord of the Rings","price":22.99,"reader":[{"name":"bob","age":25},{"name":"jack","age":26}],"isbn":"0-395-19395-8"}] PREHOOK: query: SELECT get_json_object(src_json.json, '$.store.book[0].category'), get_json_object(src_json.json, '$.store.book[*].category'), get_json_object(src_json.json, '$.store.book[*].isbn'), get_json_object(src_json.json, '$.store.book[*].reader') FROM src_json PREHOOK: type: QUERY PREHOOK: Input: default@src_json -PREHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/1253200680/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-13_578_2330404574038685071/10000 POSTHOOK: query: SELECT get_json_object(src_json.json, '$.store.book[0].category'), get_json_object(src_json.json, '$.store.book[*].category'), get_json_object(src_json.json, '$.store.book[*].isbn'), get_json_object(src_json.json, '$.store.book[*].reader') FROM src_json POSTHOOK: type: QUERY POSTHOOK: Input: default@src_json -POSTHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/1253200680/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-13_578_2330404574038685071/10000 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] reference ["reference","fiction","fiction"] ["0-553-21311-3","0-395-19395-8"] [{"name":"bob","age":25},{"name":"jack","age":26}] PREHOOK: query: SELECT get_json_object(src_json.json, '$.store.book[*].reader[0].age'), get_json_object(src_json.json, '$.store.book[*].reader[*].age') FROM src_json PREHOOK: type: QUERY PREHOOK: Input: default@src_json -PREHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/1931998473/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-16_461_380588120696280618/10000 POSTHOOK: query: SELECT get_json_object(src_json.json, '$.store.book[*].reader[0].age'), get_json_object(src_json.json, '$.store.book[*].reader[*].age') FROM src_json POSTHOOK: type: QUERY POSTHOOK: Input: default@src_json -POSTHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/1931998473/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-16_461_380588120696280618/10000 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] 25 [25,26] PREHOOK: query: SELECT get_json_object(src_json.json, '$.store.basket[0][1]'), get_json_object(src_json.json, '$.store.basket[*]'), get_json_object(src_json.json, '$.store.basket[*][0]'), get_json_object(src_json.json, '$.store.basket[0][*]'), get_json_object(src_json.json, '$.store.basket[*][*]'), get_json_object(src_json.json, '$.store.basket[0][2].b'), get_json_object(src_json.json, '$.store.basket[0][*].b') FROM src_json PREHOOK: type: QUERY PREHOOK: Input: default@src_json -PREHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/1551395780/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-19_263_683145549330957941/10000 POSTHOOK: query: SELECT get_json_object(src_json.json, '$.store.basket[0][1]'), get_json_object(src_json.json, '$.store.basket[*]'), get_json_object(src_json.json, '$.store.basket[*][0]'), get_json_object(src_json.json, '$.store.basket[0][*]'), get_json_object(src_json.json, '$.store.basket[*][*]'), get_json_object(src_json.json, '$.store.basket[0][2].b'), get_json_object(src_json.json, 
'$.store.basket[0][*].b') FROM src_json POSTHOOK: type: QUERY POSTHOOK: Input: default@src_json -POSTHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/1551395780/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-19_263_683145549330957941/10000 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] 2 [[1,2,{"b":"y","a":"x"}],[3,4],[5,6]] 1 [1,2,{"b":"y","a":"x"}] [1,2,{"b":"y","a":"x"},3,4,5,6] y ["y"] PREHOOK: query: SELECT get_json_object(src_json.json, '$.non_exist_key'), get_json_object(src_json.json, '$..no_recursive'), get_json_object(src_json.json, '$.store.book[10]'), get_json_object(src_json.json, '$.store.book[0].non_exist_key'), get_json_object(src_json.json, '$.store.basket[*].non_exist_key'), get_json_object(src_json.json, '$.store.basket[0][*].non_exist_key') FROM src_json PREHOOK: type: QUERY PREHOOK: Input: default@src_json -PREHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/1199445708/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-22_240_6448484949571023675/10000 POSTHOOK: query: SELECT get_json_object(src_json.json, '$.non_exist_key'), get_json_object(src_json.json, '$..no_recursive'), get_json_object(src_json.json, '$.store.book[10]'), get_json_object(src_json.json, '$.store.book[0].non_exist_key'), get_json_object(src_json.json, '$.store.basket[*].non_exist_key'), get_json_object(src_json.json, '$.store.basket[0][*].non_exist_key') FROM src_json POSTHOOK: type: QUERY POSTHOOK: Input: default@src_json -POSTHOOK: Output: file:/Users/carl/Projects/hd8/hive-trunk/build/ql/tmp/1199445708/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-16-22_240_6448484949571023675/10000 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] NULL NULL NULL NULL NULL NULL Index: ql/src/test/results/clientpositive/bucketizedhiveinputformat.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketizedhiveinputformat.q.out (revision 927279) +++ ql/src/test/results/clientpositive/bucketizedhiveinputformat.q.out (working copy) @@ -163,7 +163,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-04-20_080_7842799634518360698/10003 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_09-59-56_993_6091729158243759689/10003 Reduce Output Operator sort order: tag: -1 @@ -211,10 +211,12 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Output: default@t2 +POSTHOOK: Lineage: t2.name SIMPLE null[(t1)t1.FieldSchema(name:name, type:string, comment:null), ] PREHOOK: query: EXPLAIN SELECT COUNT(1) FROM T2 PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT COUNT(1) FROM T2 POSTHOOK: type: QUERY +POSTHOOK: Lineage: t2.name SIMPLE null[(t1)t1.FieldSchema(name:name, type:string, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF T2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION COUNT 1))))) @@ -269,35 +271,41 @@ PREHOOK: query: SELECT COUNT(1) FROM T2 PREHOOK: type: QUERY PREHOOK: Input: default@t2 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-05-57_107_6151270668878797225/10000 +PREHOOK: Output: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-01-53_898_6264657579539898890/10000 POSTHOOK: query: SELECT COUNT(1) FROM T2 POSTHOOK: type: QUERY POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-05-57_107_6151270668878797225/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-01-53_898_6264657579539898890/10000 +POSTHOOK: Lineage: t2.name SIMPLE null[(t1)t1.FieldSchema(name:name, type:string, comment:null), ] 5000000 PREHOOK: query: DROP TABLE T3 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE T3 POSTHOOK: type: DROPTABLE +POSTHOOK: Lineage: t2.name SIMPLE null[(t1)t1.FieldSchema(name:name, type:string, comment:null), ] PREHOOK: query: CREATE TABLE T3(name STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE POSTHOOK: query: CREATE TABLE T3(name STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@T3 +POSTHOOK: Lineage: t2.name SIMPLE null[(t1)t1.FieldSchema(name:name, type:string, comment:null), ] PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T3 PREHOOK: type: LOAD POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T3 POSTHOOK: type: LOAD POSTHOOK: Output: default@t3 +POSTHOOK: Lineage: t2.name SIMPLE null[(t1)t1.FieldSchema(name:name, type:string, comment:null), ] PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T3 PREHOOK: type: LOAD POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T3 POSTHOOK: type: LOAD POSTHOOK: Output: default@t3 +POSTHOOK: Lineage: t2.name SIMPLE null[(t1)t1.FieldSchema(name:name, type:string, comment:null), ] PREHOOK: query: EXPLAIN SELECT COUNT(1) FROM T3 PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT COUNT(1) FROM T3 POSTHOOK: type: QUERY +POSTHOOK: Lineage: t2.name SIMPLE null[(t1)t1.FieldSchema(name:name, type:string, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF T3)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION COUNT 1))))) @@ -352,24 +360,28 @@ PREHOOK: query: SELECT COUNT(1) FROM T3 PREHOOK: type: QUERY PREHOOK: Input: default@t3 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-16_399_8325774968223283452/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-16_399_8155513791234028132/10000 POSTHOOK: query: SELECT COUNT(1) FROM T3 POSTHOOK: type: QUERY POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-06-16_399_8325774968223283452/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-02-16_399_8155513791234028132/10000 +POSTHOOK: Lineage: t2.name SIMPLE null[(t1)t1.FieldSchema(name:name, type:string, comment:null), ] 1000 PREHOOK: query: DROP TABLE T1 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE T1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@t1 +POSTHOOK: Lineage: t2.name SIMPLE null[(t1)t1.FieldSchema(name:name, type:string, comment:null), ] PREHOOK: query: DROP TABLE T2 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE T2 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@t2 +POSTHOOK: Lineage: t2.name SIMPLE 
null[(t1)t1.FieldSchema(name:name, type:string, comment:null), ] PREHOOK: query: DROP TABLE T3 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE T3 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@t3 +POSTHOOK: Lineage: t2.name SIMPLE null[(t1)t1.FieldSchema(name:name, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/udf3.q.out =================================================================== --- ql/src/test/results/clientpositive/udf3.q.out (revision 927279) +++ ql/src/test/results/clientpositive/udf3.q.out (working copy) @@ -104,12 +104,22 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c5 UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.c4 UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.c3 UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.null, ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/675266909/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-05_873_2219256235951538166/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/675266909/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-10-05_873_2219256235951538166/10000 +POSTHOOK: Lineage: dest1.c5 UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.c4 UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.c3 UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.null, ] +POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.null, ] 0 NULL NULL NULL NULL Index: ql/src/test/results/clientpositive/input_testxpath.q.out =================================================================== --- ql/src/test/results/clientpositive/input_testxpath.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input_testxpath.q.out (working copy) @@ -53,7 +53,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1924223039/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-27-31_426_3735898823574760424/10000 Stage: Stage-0 Move Operator @@ -68,7 +68,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1167800217/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-27-31_426_3735898823574760424/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -104,14 +104,20 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src_thrift POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.mapvalue SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.value SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lintstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: 
file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1380558580/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-27-34_776_5337848274364001541/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1380558580/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-27-34_776_5337848274364001541/10000 +POSTHOOK: Lineage: dest1.mapvalue SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.value SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lintstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] 0 0 NULL 2 1 NULL 4 8 value_2 Index: ql/src/test/results/clientpositive/join14.q.out =================================================================== --- ql/src/test/results/clientpositive/join14.q.out (revision 927279) +++ ql/src/test/results/clientpositive/join14.q.out (working copy) @@ -125,14 +125,18 @@ POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c2 SIMPLE null[(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: select dest1.* from dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-24-57_029_61831322772510839/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-29-34_392_1321379335234072955/10000 POSTHOOK: query: select dest1.* from dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-24-57_029_61831322772510839/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-29-34_392_1321379335234072955/10000 +POSTHOOK: Lineage: dest1.c2 SIMPLE null[(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.c1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 103 val_103 103 val_103 103 val_103 Index: ql/src/test/results/clientpositive/join37.q.out =================================================================== --- ql/src/test/results/clientpositive/join37.q.out (revision 927279) +++ ql/src/test/results/clientpositive/join37.q.out (working copy) @@ -146,7 +146,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1631452878/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-45_309_3680801563329661154/10000 Stage: Stage-0 Move Operator @@ -161,7 +161,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1071764846/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-45_309_3680801563329661154/10002 Reduce Output 
Operator sort order: Map-reduce partition columns: @@ -201,14 +201,20 @@ POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 POSTHOOK: Output: default@dest_j1 +POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/456566543/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-49_076_8096988814114836183/10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/456566543/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-33-49_076_8096988814114836183/10000 +POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] 66 val_66 val_66 98 val_98 val_98 98 val_98 val_98 @@ -251,3 +257,6 @@ POSTHOOK: query: drop table dest_j1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest_j1 +POSTHOOK: Lineage: dest_j1.val2 SIMPLE null[(src)y.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(src1)x.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/groupby3.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby3.q.out (revision 927279) +++ ql/src/test/results/clientpositive/groupby3.q.out (working copy) @@ -83,7 +83,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/925660734/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-14_359_4383824162522690019/10002 Reduce Output Operator sort order: tag: -1 @@ -211,17 +211,44 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c9 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c8 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c7 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c6 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c5 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c4 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c3 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:value, type:string, 
comment:default), ] +POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/110467196/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-21_262_8723004250945966901/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/110467196/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-15-21_262_8723004250945966901/10000 +POSTHOOK: Lineage: dest1.c9 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c8 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c7 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c6 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c5 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c4 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c3 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 130091.0 260.182 256.10355987055016 98.0 0.0 142.92680950752379 143.06995106518903 20428.072875999995 20469.010897795586 PREHOOK: query: DROP TABLE dest1 PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE dest1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c9 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c8 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c7 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c6 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c5 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c4 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c3 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c2 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.c1 UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/bucketmapjoin4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin4.q.out (revision 927279) +++ ql/src/test/results/clientpositive/bucketmapjoin4.q.out (working copy) @@ -154,7 +154,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-57_871_4266823850494267817/10002 + directory: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-32_975_3957470914509688774/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -165,12 +165,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349117 + transient_lastDdlTime 1269536732 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -218,7 +218,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-57_871_4266823850494267817/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-32_975_3957470914509688774/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -229,12 +229,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349117 + transient_lastDdlTime 1269536732 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -243,12 +243,12 @@ Alias Bucket Base File Name Mapping: b {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - b {file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + b {file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Needs Tagging: false Path -> Alias: - 
file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin [a] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin [a] Path -> Partition: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -260,12 +260,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349116 + transient_lastDdlTime 1269536731 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -277,12 +277,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349116 + transient_lastDdlTime 1269536731 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -294,14 +294,14 @@ Move Operator files: hdfs directory: true - source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-57_871_4266823850494267817/10002 - destination: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-57_871_4266823850494267817/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-32_975_3957470914509688774/10002 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-32_975_3957470914509688774/10000 Stage: Stage-0 Move Operator tables: replace: true - source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-57_871_4266823850494267817/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-32_975_3957470914509688774/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -311,20 +311,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349117 + transient_lastDdlTime 1269536732 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-57_871_4266823850494267817/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-32_975_3957470914509688774/10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-57_871_4266823850494267817/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-32_975_3957470914509688774/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -340,9 +340,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-57_871_4266823850494267817/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-57_871_4266823850494267817/10002] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-32_975_3957470914509688774/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-32_975_3957470914509688774/10002] Path -> Partition: - file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-57_871_4266823850494267817/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-32_975_3957470914509688774/10002 Partition base file name: 10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -353,12 +353,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349117 + transient_lastDdlTime 1269536732 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -369,12 +369,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349117 + transient_lastDdlTime 1269536732 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -383,7 +383,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-11-57_871_4266823850494267817/10000 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-32_975_3957470914509688774/10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -394,12 +394,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1268349117 + transient_lastDdlTime 1269536732 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -420,14 +420,20 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-12-17_712_4521395371986247985/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-39_441_4179204836809310688/10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-12-17_712_4521395371986247985/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-39_441_4179204836809310688/10000 +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] 464 PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_1 select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result @@ -439,6 +445,12 @@ POSTHOOK: type: QUERY 
POSTHOOK: Input: default@bucketmapjoin_tmp_result POSTHOOK: Output: default@bucketmapjoin_hash_result_1 +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(b)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin b @@ -453,14 +465,32 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-12-52_889_4116073570307788189/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-51_655_8067705834386747827/10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-12-52_889_4116073570307788189/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-51_655_8067705834386747827/10000 +POSTHOOK: Lineage: 
bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] 464 PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_2 select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result @@ -472,20 +502,44 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result POSTHOOK: Output: default@bucketmapjoin_hash_result_2 +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a 
left outer join bucketmapjoin_hash_result_2 b on a.key = b.key
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_hash_result_2
PREHOOK: Input: default@bucketmapjoin_hash_result_1
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-04_092_1313750161468604354/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-57_940_3004838472015835500/10000
POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2
from bucketmapjoin_hash_result_1 a
left outer join bucketmapjoin_hash_result_2 b on a.key = b.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_hash_result_2
POSTHOOK: Input: default@bucketmapjoin_hash_result_1
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-04_092_1313750161468604354/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-05-57_940_3004838472015835500/10000
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
0 0 0
PREHOOK: query: explain extended
insert overwrite table bucketmapjoin_tmp_result
@@ -499,6 +553,18 @@
from srcbucket_mapjoin a join srcbucket_mapjoin b on a.key=b.key
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF srcbucket_mapjoin a) (TOK_TABREF srcbucket_mapjoin b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB bucketmapjoin_tmp_result)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value)))))
@@ -549,7 +615,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-12_006_272548200483203179/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-01_295_8383579961049426300/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -560,12 +626,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349117
+ transient_lastDdlTime 1269536732
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -613,7 +679,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-12_006_272548200483203179/10002
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-01_295_8383579961049426300/10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -624,12 +690,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349117
+ transient_lastDdlTime 1269536732
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -638,12 +704,12 @@
Alias Bucket Base File Name Mapping:
a {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt]}
Alias Bucket File Name Mapping:
- a {file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]}
+ a {file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]}
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin [b]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin [b]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin
Partition
base file name: srcbucket_mapjoin
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -655,12 +721,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin
name srcbucket_mapjoin
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349116
+ transient_lastDdlTime 1269536731
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -672,12 +738,12 @@
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcbucket_mapjoin
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket_mapjoin
name srcbucket_mapjoin
serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349116
+ transient_lastDdlTime 1269536731
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket_mapjoin
name: srcbucket_mapjoin
@@ -689,14 +755,14 @@
Move Operator
files:
hdfs directory: true
- source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-12_006_272548200483203179/10002
- destination: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-12_006_272548200483203179/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-01_295_8383579961049426300/10002
+ destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-01_295_8383579961049426300/10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-12_006_272548200483203179/10000
+ source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-01_295_8383579961049426300/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -706,20 +772,20 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349117
+ transient_lastDdlTime 1269536732
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
- tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-12_006_272548200483203179/10001
+ tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-01_295_8383579961049426300/10001
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-12_006_272548200483203179/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-01_295_8383579961049426300/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -735,9 +801,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-12_006_272548200483203179/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-12_006_272548200483203179/10002]
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-01_295_8383579961049426300/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-01_295_8383579961049426300/10002]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-12_006_272548200483203179/10002
+ file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-01_295_8383579961049426300/10002
Partition
base file name: 10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -748,12 +814,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349117
+ transient_lastDdlTime 1269536732
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -764,12 +830,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349117
+ transient_lastDdlTime 1269536732
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
name: bucketmapjoin_tmp_result
@@ -778,7 +844,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-12_006_272548200483203179/10000
+ directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-01_295_8383579961049426300/10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -789,12 +855,12 @@
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
name bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1268349117
+ transient_lastDdlTime 1269536732
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: bucketmapjoin_tmp_result
TotalFiles: 1
@@ -815,14 +881,44 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcbucket_mapjoin
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-33_739_668909043295636185/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-07_643_1146675309775707987/10000
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-13-33_739_668909043295636185/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-07_643_1146675309775707987/10000
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
464
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_1
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
@@ -834,6 +930,24 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
POSTHOOK: Output: default@bucketmapjoin_hash_result_1
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result
select /*+mapjoin(a)*/ a.key, a.value, b.value
from srcbucket_mapjoin a join srcbucket_mapjoin b
@@ -848,14 +962,56 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcbucket_mapjoin
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-08_792_6803659313012587054/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-19_789_9135328439944248253/10000
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-08_792_6803659313012587054/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-19_789_9135328439944248253/10000
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
464
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_2
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
@@ -867,48 +1023,240 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result
POSTHOOK: Output: default@bucketmapjoin_hash_result_2
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2
from bucketmapjoin_hash_result_1 a
left outer join bucketmapjoin_hash_result_2 b on a.key = b.key
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_hash_result_2
PREHOOK: Input: default@bucketmapjoin_hash_result_1
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-23_718_2718303906358656762/10000
+PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-25_923_3757111194399128786/10000
POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2
from bucketmapjoin_hash_result_1 a
left outer join bucketmapjoin_hash_result_2 b on a.key = b.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_hash_result_2
POSTHOOK: Input: default@bucketmapjoin_hash_result_1
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-460/build/ql/scratchdir/hive_2010-03-11_15-14-23_718_2718303906358656762/10000
+POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-06-25_923_3757111194399128786/10000
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
0 0 0
PREHOOK: query: drop table bucketmapjoin_hash_result_2
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table bucketmapjoin_hash_result_2
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucketmapjoin_hash_result_2
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: drop table bucketmapjoin_hash_result_1
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table bucketmapjoin_hash_result_1
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucketmapjoin_hash_result_1
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: drop table bucketmapjoin_tmp_result
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table bucketmapjoin_tmp_result
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@bucketmapjoin_tmp_result
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: drop table srcbucket_mapjoin
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table srcbucket_mapjoin
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@srcbucket_mapjoin
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: drop table srcbucket_mapjoin_part
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table srcbucket_mapjoin_part
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: drop table srcbucket_mapjoin_part_2
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table srcbucket_mapjoin_part_2
POSTHOOK: type: DROPTABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_2.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE null[(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE null[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 UDAF null[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
Index: ql/src/test/results/clientpositive/union22.q.out
===================================================================
--- ql/src/test/results/clientpositive/union22.q.out (revision 927279)
+++ ql/src/test/results/clientpositive/union22.q.out (working copy)
@@ -26,6 +26,10 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dst_union22@ds=1
+POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k4 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: insert overwrite table dst_union22_delta partition (ds='1')
select key, key, value, key, value, value from src
PREHOOK: type: QUERY
@@ -36,6 +40,16 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dst_union22_delta@ds=1
+POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k4 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k5 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k4 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k0 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: explain extended
insert overwrite table dst_union22 partition (ds='2')
select * from
@@ -62,6 +76,16 @@
) subq
POSTHOOK: type: QUERY
+POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k4 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k5 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k4 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k0 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF dst_union22_delta)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL k1) k1) (TOK_SELEXPR (TOK_TABLE_OR_COL k2) k2) (TOK_SELEXPR (TOK_TABLE_OR_COL k3) k3) (TOK_SELEXPR (TOK_TABLE_OR_COL k4) k4)) (TOK_WHERE (and (= (TOK_TABLE_OR_COL ds) '1') (<= (TOK_TABLE_OR_COL k0) 50))))) (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF dst_union22 a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF dst_union22_delta)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (= (TOK_TABLE_OR_COL ds) '1') (> (TOK_TABLE_OR_COL k0) 50))))) b) (and (= (. (TOK_TABLE_OR_COL a) k1) (. (TOK_TABLE_OR_COL b) k1)) (= (. (TOK_TABLE_OR_COL a) ds) '1')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST b))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) k1) k1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) k2) k2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) k3) k3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) k4) k4)) (TOK_WHERE (> (. 
(TOK_TABLE_OR_COL a) k1) 20))))) subq)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dst_union22 (TOK_PARTSPEC (TOK_PARTVAL ds '2')))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))) @@ -102,7 +126,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-05-06_416_2629881536907127419/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-38_097_7973823170867822397/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -157,7 +181,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-05-06_416_2629881536907127419/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-38_097_7973823170867822397/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -170,9 +194,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dst_union22/ds=1 [null-subquery2:subq-subquery2:a] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dst_union22/ds=1 [null-subquery2:subq-subquery2:a] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dst_union22/ds=1 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dst_union22/ds=1 Partition base file name: ds=1 input format: org.apache.hadoop.mapred.TextInputFormat @@ -185,13 +209,13 @@ columns.types string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dst_union22 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dst_union22 name dst_union22 partition_columns ds serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266455098 + transient_lastDdlTime 1269541831 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -202,13 +226,13 @@ columns.types string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dst_union22 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dst_union22 name dst_union22 partition_columns ds serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266455098 + transient_lastDdlTime 1269541831 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dst_union22 name: dst_union22 @@ -216,7 +240,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-05-06_416_2629881536907127419/10002 + 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-38_097_7973823170867822397/10002 Select Operator expressions: expr: _col0 @@ -259,7 +283,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-05-06_416_2629881536907127419/10000 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-38_097_7973823170867822397/10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -270,13 +294,13 @@ columns.types string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dst_union22 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dst_union22 name dst_union22 partition_columns ds serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266455098 + transient_lastDdlTime 1269541831 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dst_union22 TotalFiles: 1 @@ -320,7 +344,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-05-06_416_2629881536907127419/10000 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-38_097_7973823170867822397/10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -331,23 +355,23 @@ columns.types string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dst_union22 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dst_union22 name dst_union22 partition_columns ds serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266455098 + transient_lastDdlTime 1269541831 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dst_union22 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-05-06_416_2629881536907127419/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-05-06_416_2629881536907127419/10002] - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dst_union22_delta/ds=1 [null-subquery1:subq-subquery1:dst_union22_delta] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-38_097_7973823170867822397/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-38_097_7973823170867822397/10002] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dst_union22_delta/ds=1 [null-subquery1:subq-subquery1:dst_union22_delta] Path -> Partition: - 
file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-05-06_416_2629881536907127419/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-38_097_7973823170867822397/10002 Partition base file name: 10002 input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -363,7 +387,7 @@ columns _col0,_col1,_col8,_col9 columns.types string,string,string,string escape.delim \ - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dst_union22_delta/ds=1 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dst_union22_delta/ds=1 Partition base file name: ds=1 input format: org.apache.hadoop.mapred.TextInputFormat @@ -376,13 +400,13 @@ columns.types string:string:string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dst_union22_delta + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dst_union22_delta name dst_union22_delta partition_columns ds serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266455098 + transient_lastDdlTime 1269541831 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -393,13 +417,13 @@ columns.types string:string:string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dst_union22_delta + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dst_union22_delta name dst_union22_delta partition_columns ds serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266455098 + transient_lastDdlTime 1269541831 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dst_union22_delta name: dst_union22_delta @@ -410,7 +434,7 @@ partition: ds 2 replace: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-05-06_416_2629881536907127419/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-38_097_7973823170867822397/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -420,16 +444,16 @@ columns.types string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dst_union22 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dst_union22 name dst_union22 partition_columns ds serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266455098 + transient_lastDdlTime 
1269541831 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dst_union22 - tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-05-06_416_2629881536907127419/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-38_097_7973823170867822397/10001 PREHOOK: query: insert overwrite table dst_union22 partition (ds='2') @@ -462,14 +486,42 @@ POSTHOOK: Input: default@dst_union22_delta@ds=1 POSTHOOK: Input: default@dst_union22@ds=1 POSTHOOK: Output: default@dst_union22@ds=2 +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k4 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k4 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k3, type:string, comment:null), (dst_union22_delta)dst_union22_delta.FieldSchema(name:k3, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k1 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k0, type:string, comment:null), (dst_union22)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k2 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k1, type:string, comment:null), (dst_union22)a.FieldSchema(name:k1, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k3 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k2, type:string, comment:null), (dst_union22_delta)dst_union22_delta.FieldSchema(name:k2, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k0 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k4 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k5 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select * from dst_union22 where ds = '2' order by k1 PREHOOK: type: QUERY PREHOOK: Input: default@dst_union22@ds=2 -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-05-16_890_6969130298772384260/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-46_080_7493948303201194260/10000 POSTHOOK: query: select * from dst_union22 where ds = '2' order by k1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dst_union22@ds=2 -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-05-16_890_6969130298772384260/10000 +POSTHOOK: Output: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-30-46_080_7493948303201194260/10000 +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k4 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k4 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k3, type:string, comment:null), (dst_union22_delta)dst_union22_delta.FieldSchema(name:k3, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k1 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k0, type:string, comment:null), (dst_union22)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k2 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k1, type:string, comment:null), (dst_union22)a.FieldSchema(name:k1, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k3 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k2, type:string, comment:null), (dst_union22_delta)dst_union22_delta.FieldSchema(name:k2, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k0 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k4 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k5 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 0 val_0 2 0 val_0 0 val_0 2 0 val_0 0 val_0 2 @@ -1491,8 +1543,36 @@ POSTHOOK: query: drop table dst_union22 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dst_union22 +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k4 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k4 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k3, type:string, comment:null), (dst_union22_delta)dst_union22_delta.FieldSchema(name:k3, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k1 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k0, type:string, comment:null), (dst_union22)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k2 SET 
null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k1, type:string, comment:null), (dst_union22)a.FieldSchema(name:k1, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k3 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k2, type:string, comment:null), (dst_union22_delta)dst_union22_delta.FieldSchema(name:k2, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k0 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k4 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k5 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: drop table dst_union22_delta PREHOOK: type: DROPTABLE POSTHOOK: query: drop table dst_union22_delta POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dst_union22_delta +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k4 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k4 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k3, type:string, comment:null), (dst_union22_delta)dst_union22_delta.FieldSchema(name:k3, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k1 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k0, type:string, comment:null), (dst_union22)a.FieldSchema(name:ds, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k2 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k1, type:string, comment:null), (dst_union22)a.FieldSchema(name:k1, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22 PARTITION(ds=2).k3 SET null[(dst_union22_delta)dst_union22_delta.FieldSchema(name:k2, type:string, comment:null), (dst_union22_delta)dst_union22_delta.FieldSchema(name:k2, type:string, comment:null), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k0 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k1 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k2 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k3 SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k4 SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dst_union22_delta PARTITION(ds=1).k5 SIMPLE 
null[(src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/sample2.q.out =================================================================== --- ql/src/test/results/clientpositive/sample2.q.out (revision 927279) +++ ql/src/test/results/clientpositive/sample2.q.out (working copy) @@ -52,7 +52,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-57_766_6234470953739407099/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-53_353_8833192780064885232/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -63,21 +63,21 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452757 + transient_lastDdlTime 1269539513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Partition base file name: srcbucket0.txt input format: org.apache.hadoop.mapred.TextInputFormat @@ -89,12 +89,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket name srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452756 + transient_lastDdlTime 1269539512 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -106,12 +106,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket name srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452756 + transient_lastDdlTime 1269539512 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket name: srcbucket @@ -123,14 +123,14 @@ Move Operator files: hdfs directory: true - source: 
file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-57_766_6234470953739407099/10002 - destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-57_766_6234470953739407099/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-53_353_8833192780064885232/10002 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-53_353_8833192780064885232/10000 Stage: Stage-0 Move Operator tables: replace: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-57_766_6234470953739407099/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-53_353_8833192780064885232/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -140,20 +140,20 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452757 + transient_lastDdlTime 1269539513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-57_766_6234470953739407099/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-53_353_8833192780064885232/10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-57_766_6234470953739407099/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-53_353_8833192780064885232/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -167,9 +167,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-57_766_6234470953739407099/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-57_766_6234470953739407099/10002] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-53_353_8833192780064885232/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-53_353_8833192780064885232/10002] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-57_766_6234470953739407099/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-53_353_8833192780064885232/10002 Partition base file name: 10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -180,12 +180,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452757 + transient_lastDdlTime 1269539513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -196,12 +196,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452757 + transient_lastDdlTime 1269539513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 name: dest1 @@ -210,7 +210,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-25-57_766_6234470953739407099/10000 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-53_353_8833192780064885232/10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -221,12 +221,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266452757 + transient_lastDdlTime 1269539513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 TotalFiles: 1 @@ -243,14 +243,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-01_890_6288044394002583077/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-56_884_8356538115849178141/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-26-01_890_6288044394002583077/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-51-56_884_8356538115849178141/10000 +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] 474 val_475 62 val_63 468 val_469 Index: 
ql/src/test/results/clientpositive/input28.q.out =================================================================== --- ql/src/test/results/clientpositive/input28.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input28.q.out (working copy) @@ -24,16 +24,22 @@ POSTHOOK: Input: default@tst@d=2009-01-01 POSTHOOK: Input: default@src POSTHOOK: Output: default@tst@d=2009-01-01 +POSTHOOK: Lineage: tst PARTITION(d=2009-01-01).b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: tst PARTITION(d=2009-01-01).a SIMPLE null[(tst)tst.FieldSchema(name:d, type:string, comment:null), ] PREHOOK: query: select * from tst where tst.d='2009-01-01' PREHOOK: type: QUERY PREHOOK: Input: default@tst@d=2009-01-01 -PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/203929646/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-22-25_401_5241285422903313912/10000 POSTHOOK: query: select * from tst where tst.d='2009-01-01' POSTHOOK: type: QUERY POSTHOOK: Input: default@tst@d=2009-01-01 -POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/203929646/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-22-25_401_5241285422903313912/10000 +POSTHOOK: Lineage: tst PARTITION(d=2009-01-01).b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: tst PARTITION(d=2009-01-01).a SIMPLE null[(tst)tst.FieldSchema(name:d, type:string, comment:null), ] PREHOOK: query: drop table tst PREHOOK: type: DROPTABLE POSTHOOK: query: drop table tst POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@tst +POSTHOOK: Lineage: tst PARTITION(d=2009-01-01).b SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: tst PARTITION(d=2009-01-01).a SIMPLE null[(tst)tst.FieldSchema(name:d, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out =================================================================== --- ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out (revision 927279) +++ ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out (working copy) @@ -49,9 +49,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/test/data/warehouse/src [src] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src [src] Path -> Partition: - file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/test/data/warehouse/src + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -62,12 +62,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1269370923 + transient_lastDdlTime 1269537166 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -78,12 +78,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1269370923 + transient_lastDdlTime 1269537166 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src @@ -99,7 +99,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_12-02-03_982_566371038266932645/10000 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-12-46_753_4139730212717833471/10000 NumFilesPerFileSink: 2 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -111,12 +111,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/test/data/warehouse/bucket2_1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucket2_1 name bucket2_1 serialization.ddl struct bucket2_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1269370923 + transient_lastDdlTime 1269537166 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucket2_1 TotalFiles: 2 @@ -126,7 +126,7 @@ Move Operator tables: replace: true - source: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_12-02-03_982_566371038266932645/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-12-46_753_4139730212717833471/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -137,15 +137,15 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/test/data/warehouse/bucket2_1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/bucket2_1 name bucket2_1 serialization.ddl struct bucket2_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1269370923 + transient_lastDdlTime 1269537166 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucket2_1 - tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_12-02-03_982_566371038266932645/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-12-46_753_4139730212717833471/10001 PREHOOK: query: insert overwrite table bucket2_1 @@ -158,12 +158,16 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@bucket2_1 +POSTHOOK: 
Lineage: bucket2_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: bucket2_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: explain select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key PREHOOK: type: QUERY POSTHOOK: query: explain select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key POSTHOOK: type: QUERY +POSTHOOK: Lineage: bucket2_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: bucket2_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF bucket2_1 (TOK_TABLESAMPLE 1 2) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key))))) @@ -221,11 +225,13 @@ PREHOOK: query: select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key PREHOOK: type: QUERY PREHOOK: Input: default@bucket2_1 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_12-02-38_972_2579884827618647110/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-12-50_256_3540354991280429099/10000 POSTHOOK: query: select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key POSTHOOK: type: QUERY POSTHOOK: Input: default@bucket2_1 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive_RCFile/build/ql/scratchdir/hive_2010-03-23_12-02-38_972_2579884827618647110/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-12-50_256_3540354991280429099/10000 +POSTHOOK: Lineage: bucket2_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: bucket2_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 val_0 0 val_0 0 val_0 @@ -478,3 +484,5 @@ POSTHOOK: query: drop table bucket2_1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@bucket2_1 +POSTHOOK: Lineage: bucket2_1.value SIMPLE null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: bucket2_1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/input_testxpath2.q.out =================================================================== --- ql/src/test/results/clientpositive/input_testxpath2.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input_testxpath2.q.out (working copy) @@ -61,7 +61,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1740584016/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-27-37_198_5258296369031804796/10000 Stage: Stage-0 Move Operator @@ -76,7 +76,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/249629882/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-27-37_198_5258296369031804796/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -112,14 +112,20 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src_thrift POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.mstringstring_size 
SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.lintstring_size SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lintstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.lint_size SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/662681223/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-27-40_871_1397605825561872321/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/662681223/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-27-40_871_1397605825561872321/10000 +POSTHOOK: Lineage: dest1.mstringstring_size SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.lintstring_size SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lintstring, type:array, comment:from deserializer), ] +POSTHOOK: Lineage: dest1.lint_size SIMPLE null[(src_thrift)src_thrift.FieldSchema(name:lint, type:array, comment:from deserializer), ] 3 1 1 3 1 1 3 1 1 Index: ql/src/test/results/clientpositive/udf_10_trims.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_10_trims.q.out (revision 927279) +++ ql/src/test/results/clientpositive/udf_10_trims.q.out (working copy) @@ -61,7 +61,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/152678884/10000 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-11-07_729_1213993511794688775/10000 Stage: Stage-0 Move Operator @@ -76,7 +76,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/2133129550/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-11-07_729_1213993511794688775/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -112,3 +112,4 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.c1 SIMPLE null[] Index: ql/src/test/results/clientpositive/join32.q.out =================================================================== --- ql/src/test/results/clientpositive/join32.q.out (revision 927279) +++ ql/src/test/results/clientpositive/join32.q.out (working copy) @@ -48,7 +48,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10003 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10003 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -84,7 +84,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10003 + directory: 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10003 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -97,9 +97,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src [y] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src [y] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -110,12 +110,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451300 + transient_lastDdlTime 1269538355 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -126,12 +126,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451300 + transient_lastDdlTime 1269538355 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src @@ -139,7 +139,7 @@ Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10003 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10003 Select Operator expressions: expr: _col0 @@ -182,7 +182,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -193,12 +193,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451300 + transient_lastDdlTime 1269538356 serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -261,7 +261,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -272,21 +272,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451300 + transient_lastDdlTime 1269538356 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10003 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10003] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10003 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10003] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10003 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10003 Partition base file name: 10003 input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -310,14 +310,14 @@ Move Operator files: hdfs directory: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10002 - destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10002 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10000 Stage: Stage-0 Move Operator tables: replace: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -327,20 +327,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451300 + transient_lastDdlTime 1269538356 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -356,9 +356,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10002] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10002] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10002 Partition base file name: 10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -369,12 +369,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451300 + transient_lastDdlTime 1269538356 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -385,12 +385,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451300 + transient_lastDdlTime 1269538356 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -399,7 +399,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: 
file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-40_907_1655518346126745740/10000 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-36_401_1815893450730525335/10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -410,12 +410,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266451300 + transient_lastDdlTime 1269538356 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -440,14 +440,20 @@ POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 POSTHOOK: Output: default@dest_j1 +POSTHOOK: Lineage: dest_j1.val2 SET null[(src)y.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-50_498_3146515945044022967/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-43_491_5053285816853028744/10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_16-01-50_498_3146515945044022967/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-32-43_491_5053285816853028744/10000 +POSTHOOK: Lineage: dest_j1.val2 SET null[(src)y.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] 146 val_146 val_146 146 val_146 val_146 146 val_146 val_146 @@ -538,3 +544,6 @@ POSTHOOK: query: drop table dest_j1 POSTHOOK: type: DROPTABLE POSTHOOK: Output: default@dest_j1 +POSTHOOK: Lineage: dest_j1.val2 SET null[(src)y.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest_j1.value SIMPLE null[(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ] +POSTHOOK: Lineage: dest_j1.key SIMPLE null[(src1)x.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/input14.q.out =================================================================== --- ql/src/test/results/clientpositive/input14.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input14.q.out (working copy) @@ -121,14 +121,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, 
comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/1581739074/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-33_757_9128615207535868548/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/1581739074/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-20-33_757_9128615207535868548/10000 +POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/input_part1.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part1.q.out (revision 927279) +++ ql/src/test/results/clientpositive/input_part1.q.out (working copy) @@ -63,7 +63,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-00_636_5982093198769084732/10002 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-16_853_363841125883666825/10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -74,21 +74,21 @@ columns.types int:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266450780 + transient_lastDdlTime 1269537976 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [srcpart] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [srcpart] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -102,13 +102,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266450766 + transient_lastDdlTime 1269537975 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -119,13 +119,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266450766 + transient_lastDdlTime 1269537975 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -137,14 +137,14 @@ Move Operator files: hdfs directory: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-00_636_5982093198769084732/10002 - destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-00_636_5982093198769084732/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-16_853_363841125883666825/10002 + destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-16_853_363841125883666825/10000 Stage: Stage-0 Move Operator tables: replace: true - source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-00_636_5982093198769084732/10000 + source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-16_853_363841125883666825/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -154,20 +154,20 @@ columns.types int:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266450780 + transient_lastDdlTime 1269537976 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-00_636_5982093198769084732/10001 + tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-16_853_363841125883666825/10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-00_636_5982093198769084732/10002 + 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-16_853_363841125883666825/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -185,9 +185,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-00_636_5982093198769084732/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-00_636_5982093198769084732/10002] + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-16_853_363841125883666825/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-16_853_363841125883666825/10002] Path -> Partition: - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-00_636_5982093198769084732/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-16_853_363841125883666825/10002 Partition base file name: 10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -198,12 +198,12 @@ columns.types int:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266450780 + transient_lastDdlTime 1269537976 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -214,12 +214,12 @@ columns.types int:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266450780 + transient_lastDdlTime 1269537976 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 name: dest1 @@ -228,7 +228,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-00_636_5982093198769084732/10000 + directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-16_853_363841125883666825/10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -239,12 +239,12 @@ columns.types int:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 name dest1 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1266450780 + transient_lastDdlTime 1269537976 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 TotalFiles: 1 @@ -261,14 +261,22 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.ds SIMPLE null[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.hr SIMPLE null[(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-05_156_8015804684749080106/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-20_891_465300167284552533/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-53-05_156_8015804684749080106/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-26-20_891_465300167284552533/10000 +POSTHOOK: Lineage: dest1.ds SIMPLE null[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.hr SIMPLE null[(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.value SIMPLE null[(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ] 86 val_86 12 2008-04-08 27 val_27 12 2008-04-08 98 val_98 12 2008-04-08 Index: ql/src/test/results/clientpositive/mapreduce4.q.out =================================================================== --- ql/src/test/results/clientpositive/mapreduce4.q.out (revision 927279) +++ ql/src/test/results/clientpositive/mapreduce4.q.out (working copy) @@ -122,14 +122,22 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.one SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.ten SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/2140604895/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-15_331_2921253979662013361/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 
-POSTHOOK: Output: file:/data/users/nzhang/work/870/apache-hive/build/ql/tmp/2140604895/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-39-15_331_2921253979662013361/10000 +POSTHOOK: Lineage: dest1.value SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.one SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.ten SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SCRIPT null[(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ] 90 9 0 val_90 90 9 0 val_90 90 9 0 val_90 Index: ql/src/test/results/clientpositive/groupby5_noskew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby5_noskew.q.out (revision 927279) +++ ql/src/test/results/clientpositive/groupby5_noskew.q.out (working copy) @@ -106,14 +106,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 +POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/890209384/10000 +PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-35_656_5248861515211015027/10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/890209384/10000 +POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_10-16-35_656_5248861515211015027/10000 +POSTHOOK: Lineage: dest1.value UDAF null[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: dest1.key SIMPLE null[(src)src.FieldSchema(name:key, type:string, comment:default), ] 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/compiler/plan/sample1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample1.q.xml (revision 927279) +++ ql/src/test/results/compiler/plan/sample1.q.xml (working copy) @@ -1,5 +1,5 @@ - + Stage-2 @@ -75,11 +75,11 @@ location - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart transient_lastDdlTime - 1268858254 + 1269542525 @@ -136,7 +136,7 @@ - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-17_13-37-35_998_1806468153291132483/10001 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-07_748_2515284136233771974/10001 1 @@ -999,6 +999,26 @@ + + + + + + + + + + + + + + + + + + + + @@ -1059,7 +1079,7 @@ - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 s @@ -1071,7 +1091,7 @@ - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 hr=11 @@ -1141,11 +1161,11 @@ location - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart transient_lastDdlTime - 1268858254 + 1269542525 Index: ql/src/test/results/compiler/plan/sample2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample2.q.xml (revision 927279) +++ ql/src/test/results/compiler/plan/sample2.q.xml (working copy) @@ -41,7 +41,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-08_637_4922142923436601401/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-10_286_1314906919273172928/10002 @@ -286,10 +286,10 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-08_637_4922142923436601401/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-10_286_1314906919273172928/10002 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-08_637_4922142923436601401/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-10_286_1314906919273172928/10002 @@ -298,7 +298,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-08_637_4922142923436601401/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-10_286_1314906919273172928/10002 10002 @@ -352,11 +352,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1266455948 + 1269542530 @@ -395,7 +395,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-08_637_4922142923436601401/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-10_286_1314906919273172928/10000 1 @@ -447,7 +447,7 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -455,7 +455,7 @@ transient_lastDdlTime - 1266455948 + 1269542530 @@ -596,13 +596,13 @@ true - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-08_637_4922142923436601401/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-10_286_1314906919273172928/10000 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-08_637_4922142923436601401/10001 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-10_286_1314906919273172928/10001 @@ -623,10 +623,10 @@ true - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-08_637_4922142923436601401/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-10_286_1314906919273172928/10002 - 
file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-08_637_4922142923436601401/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-10_286_1314906919273172928/10000 @@ -652,7 +652,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-08_637_4922142923436601401/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-10_286_1314906919273172928/10002 @@ -742,11 +742,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1266455947 + 1269542529 @@ -802,7 +802,7 @@ 1 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-08_637_4922142923436601401/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-10_286_1314906919273172928/10002 1 @@ -1134,6 +1134,34 @@ + + + + + + + + key + + + + + + + + + + value + + + + + + + + + + @@ -1302,24 +1330,10 @@ - - - key - - - - - + - - - value - - - - - + @@ -1379,7 +1393,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt s @@ -1391,7 +1405,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt srcbucket0.txt @@ -1452,11 +1466,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1266455947 + 1269542529 Index: ql/src/test/results/compiler/plan/sample3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample3.q.xml (revision 927279) +++ ql/src/test/results/compiler/plan/sample3.q.xml (working copy) @@ -41,7 +41,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-12_385_4762854248744759404/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-12_822_228779851864728006/10002 @@ -286,10 +286,10 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-12_385_4762854248744759404/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-12_822_228779851864728006/10002 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-12_385_4762854248744759404/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-12_822_228779851864728006/10002 @@ -298,7 +298,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-12_385_4762854248744759404/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-12_822_228779851864728006/10002 10002 @@ -352,11 +352,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1266455952 + 1269542532 @@ -395,7 +395,7 @@ - 
file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-12_385_4762854248744759404/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-12_822_228779851864728006/10000 1 @@ -447,7 +447,7 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -455,7 +455,7 @@ transient_lastDdlTime - 1266455952 + 1269542532 @@ -596,13 +596,13 @@ true - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-12_385_4762854248744759404/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-12_822_228779851864728006/10000 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-12_385_4762854248744759404/10001 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-12_822_228779851864728006/10001 @@ -623,10 +623,10 @@ true - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-12_385_4762854248744759404/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-12_822_228779851864728006/10002 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-12_385_4762854248744759404/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-12_822_228779851864728006/10000 @@ -652,7 +652,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-12_385_4762854248744759404/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-12_822_228779851864728006/10002 @@ -742,11 +742,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1266455950 + 1269542531 @@ -802,7 +802,7 @@ 1 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-12_385_4762854248744759404/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-12_822_228779851864728006/10002 1 @@ -1144,6 +1144,34 @@ + + + + + + + + key + + + + + + + + + + value + + + + + + + + + + @@ -1325,24 +1353,10 @@ - - - key - - - - - + - - - value - - - - - + @@ -1402,7 +1416,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket s @@ -1414,7 +1428,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket srcbucket @@ -1475,11 +1489,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1266455950 + 1269542531 Index: ql/src/test/results/compiler/plan/sample4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample4.q.xml (revision 927279) +++ ql/src/test/results/compiler/plan/sample4.q.xml (working copy) @@ -41,7 +41,7 @@ - 
file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-15_437_1539749685196717079/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-15_294_7079646750879741531/10002 @@ -286,10 +286,10 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-15_437_1539749685196717079/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-15_294_7079646750879741531/10002 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-15_437_1539749685196717079/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-15_294_7079646750879741531/10002 @@ -298,7 +298,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-15_437_1539749685196717079/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-15_294_7079646750879741531/10002 10002 @@ -352,11 +352,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1266455955 + 1269542535 @@ -395,7 +395,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-15_437_1539749685196717079/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-15_294_7079646750879741531/10000 1 @@ -447,7 +447,7 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -455,7 +455,7 @@ transient_lastDdlTime - 1266455955 + 1269542535 @@ -596,13 +596,13 @@ true - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-15_437_1539749685196717079/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-15_294_7079646750879741531/10000 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-15_437_1539749685196717079/10001 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-15_294_7079646750879741531/10001 @@ -623,10 +623,10 @@ true - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-15_437_1539749685196717079/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-15_294_7079646750879741531/10002 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-15_437_1539749685196717079/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-15_294_7079646750879741531/10000 @@ -652,7 +652,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-15_437_1539749685196717079/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-15_294_7079646750879741531/10002 @@ -742,11 +742,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1266455953 + 1269542534 @@ -802,7 +802,7 @@ 1 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-15_437_1539749685196717079/10002 + 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-15_294_7079646750879741531/10002 1 @@ -1134,6 +1134,34 @@ + + + + + + + + key + + + + + + + + + + value + + + + + + + + + + @@ -1302,24 +1330,10 @@ - - - key - - - - - + - - - value - - - - - + @@ -1379,7 +1393,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt s @@ -1391,7 +1405,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt srcbucket0.txt @@ -1452,11 +1466,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1266455953 + 1269542534 Index: ql/src/test/results/compiler/plan/sample5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample5.q.xml (revision 927279) +++ ql/src/test/results/compiler/plan/sample5.q.xml (working copy) @@ -41,7 +41,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-18_494_770929746008325283/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-17_808_17314706536125962/10002 @@ -286,10 +286,10 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-18_494_770929746008325283/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-17_808_17314706536125962/10002 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-18_494_770929746008325283/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-17_808_17314706536125962/10002 @@ -298,7 +298,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-18_494_770929746008325283/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-17_808_17314706536125962/10002 10002 @@ -352,11 +352,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1266455958 + 1269542537 @@ -395,7 +395,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-18_494_770929746008325283/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-17_808_17314706536125962/10000 1 @@ -447,7 +447,7 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -455,7 +455,7 @@ transient_lastDdlTime - 1266455958 + 1269542537 @@ -596,13 +596,13 @@ true - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-18_494_770929746008325283/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-17_808_17314706536125962/10000 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-18_494_770929746008325283/10001 + 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-17_808_17314706536125962/10001 @@ -623,10 +623,10 @@ true - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-18_494_770929746008325283/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-17_808_17314706536125962/10002 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-18_494_770929746008325283/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-17_808_17314706536125962/10000 @@ -652,7 +652,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-18_494_770929746008325283/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-17_808_17314706536125962/10002 @@ -742,11 +742,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1266455956 + 1269542536 @@ -802,7 +802,7 @@ 1 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-18_494_770929746008325283/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-17_808_17314706536125962/10002 1 @@ -1131,6 +1131,34 @@ + + + + + + + + key + + + + + + + + + + value + + + + + + + + + + @@ -1299,24 +1327,10 @@ - - - key - - - - - + - - - value - - - - - + @@ -1376,7 +1390,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket s @@ -1388,7 +1402,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket srcbucket @@ -1449,11 +1463,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1266455956 + 1269542536 Index: ql/src/test/results/compiler/plan/sample6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample6.q.xml (revision 927279) +++ ql/src/test/results/compiler/plan/sample6.q.xml (working copy) @@ -41,7 +41,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-21_607_1177742110795202748/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-20_326_3547898976347933960/10002 @@ -286,10 +286,10 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-21_607_1177742110795202748/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-20_326_3547898976347933960/10002 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-21_607_1177742110795202748/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-20_326_3547898976347933960/10002 @@ -298,7 +298,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-21_607_1177742110795202748/10002 + 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-20_326_3547898976347933960/10002 10002 @@ -352,11 +352,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1266455961 + 1269542540 @@ -395,7 +395,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-21_607_1177742110795202748/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-20_326_3547898976347933960/10000 1 @@ -447,7 +447,7 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -455,7 +455,7 @@ transient_lastDdlTime - 1266455961 + 1269542540 @@ -596,13 +596,13 @@ true - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-21_607_1177742110795202748/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-20_326_3547898976347933960/10000 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-21_607_1177742110795202748/10001 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-20_326_3547898976347933960/10001 @@ -623,10 +623,10 @@ true - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-21_607_1177742110795202748/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-20_326_3547898976347933960/10002 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-21_607_1177742110795202748/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-20_326_3547898976347933960/10000 @@ -652,7 +652,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-21_607_1177742110795202748/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-20_326_3547898976347933960/10002 @@ -742,11 +742,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1266455960 + 1269542539 @@ -802,7 +802,7 @@ 1 - file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-19-21_607_1177742110795202748/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-20_326_3547898976347933960/10002 1 @@ -1134,6 +1134,34 @@ + + + + + + + + key + + + + + + + + + + value + + + + + + + + + + @@ -1302,24 +1330,10 @@ - - - key - - - - - + - - - value - - - - - + @@ -1379,7 +1393,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt s @@ -1391,7 +1405,7 @@ - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt srcbucket0.txt @@ -1452,11 +1466,11 @@ location - file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket + 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1266455960 + 1269542539 Index: ql/src/test/results/compiler/plan/sample7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample7.q.xml (revision 927279) +++ ql/src/test/results/compiler/plan/sample7.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -41,7 +41,7 @@ - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-17_13-37-48_704_4828253061022072997/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-22_888_2527311663937672623/10002 @@ -286,10 +286,10 @@ - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-17_13-37-48_704_4828253061022072997/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-22_888_2527311663937672623/10002 - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-17_13-37-48_704_4828253061022072997/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-22_888_2527311663937672623/10002 @@ -298,7 +298,7 @@ - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-17_13-37-48_704_4828253061022072997/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-22_888_2527311663937672623/10002 10002 @@ -352,11 +352,11 @@ location - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1268858268 + 1269542542 @@ -395,7 +395,7 @@ - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-17_13-37-48_704_4828253061022072997/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-22_888_2527311663937672623/10000 1 @@ -447,7 +447,7 @@ location - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -455,7 +455,7 @@ transient_lastDdlTime - 1268858268 + 1269542542 @@ -596,13 +596,13 @@ true - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-17_13-37-48_704_4828253061022072997/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-22_888_2527311663937672623/10000 - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-17_13-37-48_704_4828253061022072997/10001 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-22_888_2527311663937672623/10001 @@ -623,10 +623,10 @@ true - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-17_13-37-48_704_4828253061022072997/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-22_888_2527311663937672623/10002 - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-17_13-37-48_704_4828253061022072997/10000 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-22_888_2527311663937672623/10000 @@ -652,7 +652,7 @@ - 
file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-17_13-37-48_704_4828253061022072997/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-22_888_2527311663937672623/10002 @@ -742,11 +742,11 @@ location - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1268858267 + 1269542541 @@ -806,7 +806,7 @@ 1 - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-17_13-37-48_704_4828253061022072997/10002 + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/scratchdir/hive_2010-03-25_11-42-22_888_2527311663937672623/10002 1 @@ -1247,6 +1247,20 @@ + + + + + + + + + + + + + + @@ -1549,7 +1563,7 @@ - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt s @@ -1561,7 +1575,7 @@ - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt srcbucket0.txt @@ -1622,11 +1636,11 @@ location - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1268858267 + 1269542541
Index: ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java (revision 927279)
+++ ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java (working copy)
@@ -18,8 +18,15 @@
 package org.apache.hadoop.hive.ql.hooks;
 
+import java.util.Iterator;
+import java.util.Map;
 import java.util.Set;
 
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.ql.hooks.LineageInfo.BaseColumnInfo;
+import org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency;
+import org.apache.hadoop.hive.ql.hooks.LineageInfo.DependencyKey;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -32,7 +39,8 @@
 
   @Override
   public void run(SessionState sess, Set<ReadEntity> inputs,
-      Set<WriteEntity> outputs, UserGroupInformation ugi) throws Exception {
+      Set<WriteEntity> outputs, LineageInfo linfo,
+      UserGroupInformation ugi) throws Exception {
 
     LogHelper console = SessionState.getConsole();
 
@@ -51,6 +59,47 @@
     for (WriteEntity we : outputs) {
       console.printError("POSTHOOK: Output: " + we.toString());
     }
+
+    // Also print out the generic lineage information if there is any
+    if (linfo != null) {
+      Iterator<Map.Entry<DependencyKey, Dependency>> iter = linfo.iterator();
+      while(iter.hasNext()) {
+        Map.Entry<DependencyKey, Dependency> it = iter.next();
+        Dependency dep = it.getValue();
+        DependencyKey depK = it.getKey();
+
+        StringBuilder sb = new StringBuilder();
+        sb.append("POSTHOOK: Lineage: ");
+        if (depK.getDataContainer().isPartition()) {
+          Partition part = depK.getDataContainer().getPartition();
+          sb.append(part.getTableName());
+          sb.append(" PARTITION(");
+          int i = 0;
+          for (FieldSchema fs : depK.getDataContainer().getTable().getPartitionKeys()) {
+            if (i != 0) {
+              sb.append(",");
+            }
+            sb.append(fs.getName() + "=" + part.getValues().get(i++));
+          }
+          sb.append(")");
+        }
+        else {
+          sb.append(depK.getDataContainer().getTable().getTableName());
+        }
+        sb.append("." + depK.getFieldSchema().getName() + " " +
+            dep.getType() + " " + dep.getExpr());
+
+        sb.append("[");
+        for(BaseColumnInfo col: dep.getBaseCols()) {
+          sb.append("("+col.getTabAlias().getTable().getTableName() + ")" +
+              col.getTabAlias().getAlias() + "." +
+              col.getColumn() + ", ");
+        }
+        sb.append("]");
+
+        console.printError(sb.toString());
+      }
+    }
   }
 }
Index: ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java (revision 927279)
+++ ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java (working copy)
@@ -32,7 +32,8 @@
 
   @Override
   public void run(SessionState sess, Set<ReadEntity> inputs,
-      Set<WriteEntity> outputs, UserGroupInformation ugi) throws Exception {
+      Set<WriteEntity> outputs, UserGroupInformation ugi)
+      throws Exception {
 
     LogHelper console = SessionState.getConsole();
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java (revision 927279)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java (working copy)
@@ -27,6 +27,7 @@
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.ppd.PredicatePushDown;
+import org.apache.hadoop.hive.ql.optimizer.lineage.Generator;
 
 /**
  * Implementation of the optimizer.
@@ -42,6 +43,8 @@
    */
   public void initialize(HiveConf hiveConf) {
     transformations = new ArrayList<Transform>();
+    // Add the transformation that computes the lineage information.
+    transformations.add(new Generator());
     if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTCP)) {
       transformations.add(new ColumnPruner());
     }
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcCtx.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcCtx.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcCtx.java (revision 0)
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.optimizer.lineage;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
+
+/**
+ * The processor context for the lineage information. This contains the
This contains the + lineage context and the column info and operator information that is + being used for the current expression. + */ +public class ExprProcCtx implements NodeProcessorCtx { + + /** + * The lineage context that is being populated. + */ + private LineageCtx lctx; + + /** + * The input operator in case the current operator is not a leaf. + */ + private Operator<? extends Serializable> inpOp; + + /** + * Constructor. + * + * @param lctx The lineage context that contains the dependencies for the inputs. + * @param inpOp The input operator to the current operator. + */ + public ExprProcCtx(LineageCtx lctx, + Operator<? extends Serializable> inpOp) { + this.lctx = lctx; + this.inpOp = inpOp; + } + + /** + * Gets the lineage context. + * + * @return LineageCtx The lineage context. + */ + public LineageCtx getLineageCtx() { + return lctx; + } + + /** + * Gets the input operator. + * + * @return Operator The input operator - this is null in case the current + * operator is a leaf. + */ + public Operator<? extends Serializable> getInputOperator() { + return inpOp; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/LineageCtx.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/LineageCtx.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/LineageCtx.java (revision 0) @@ -0,0 +1,160 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.optimizer.lineage; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Set; + +import org.apache.hadoop.hive.ql.exec.ColumnInfo; +import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.hooks.LineageInfo; +import org.apache.hadoop.hive.ql.hooks.LineageInfo.BaseColumnInfo; +import org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency; +import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.parse.ParseContext; + +/** + * This class contains the lineage context that is passed + * while walking the operator tree in Lineage. The context + * contains the LineageInfo structure that is passed to the + * post-execution hooks. + */ +public class LineageCtx implements NodeProcessorCtx { + + public static class Index { + + /** + * Serial Version UID. + */ + private static final long serialVersionUID = 1L; + + /** + * The map contains an index from the (operator, columnInfo) to the + * dependency vector for that tuple. This is used to generate the + * dependency vectors during the walk of the operator tree. + */ + private final Map<Operator<? extends Serializable>, HashMap<ColumnInfo, Dependency>> depMap; + + /** + * Constructor.
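For orientation, here is a minimal, self-contained sketch of the two-level (operator, column) -> dependency index that the depMap javadoc above describes. The type parameters Op, Col and Dep are hypothetical stand-ins for Hive's Operator, ColumnInfo and Dependency types; only the map shape is taken from the patch.

    import java.util.HashMap;
    import java.util.Map;

    class DepIndex<Op, Col, Dep> {
      // Two-level map: operator -> (column -> dependency), mirroring depMap.
      private final Map<Op, Map<Col, Dep>> depMap = new HashMap<Op, Map<Col, Dep>>();

      Dep get(Op op, Col col) {
        Map<Col, Dep> colMap = depMap.get(op);
        return (colMap == null) ? null : colMap.get(col);
      }

      void put(Op op, Col col, Dep dep) {
        Map<Col, Dep> colMap = depMap.get(op);
        if (colMap == null) {
          colMap = new HashMap<Col, Dep>();
          depMap.put(op, colMap);
        }
        colMap.put(col, dep);
      }
    }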
+ */ + public Index() { + depMap = new HashMap<Operator<? extends Serializable>, HashMap<ColumnInfo, Dependency>>(); + } + + /** + * Gets the dependency for an operator, column info tuple. + * @param op The operator whose dependency is being inspected. + * @param col The column info whose dependency is being inspected. + * @return Dependency for that particular operator, column info tuple. + * null if no dependency is found. + */ + public Dependency getDependency(Operator<? extends Serializable> op, ColumnInfo col) { + HashMap<ColumnInfo, Dependency> colMap = depMap.get(op); + if (colMap == null) { + return null; + } + + return colMap.get(col); + } + + /** + * Puts the dependency for an operator, column info tuple. + * @param op The operator whose dependency is being inserted. + * @param col The column info whose dependency is being inserted. + * @param dep The dependency. + */ + public void putDependency(Operator<? extends Serializable> op, + ColumnInfo col, Dependency dep) { + HashMap<ColumnInfo, Dependency> colMap = depMap.get(op); + if (colMap == null) { + colMap = new HashMap<ColumnInfo, Dependency>(); + depMap.put(op, colMap); + } + colMap.put(col, dep); + } + + /** + * Merges the new dependencies in dep to the existing dependencies + * of (op, ci). + * + * @param op The operator of the column whose dependency is being modified. + * @param ci The column info of the associated column. + * @param dep The new dependency. + */ + public void mergeDependency(Operator<? extends Serializable> op, + ColumnInfo ci, Dependency dep) { + Dependency old_dep = getDependency(op, ci); + if (old_dep == null) { + putDependency(op, ci, dep); + } else { + old_dep.setType(LineageInfo.DependencyType.SET); + Set<BaseColumnInfo> bci_set = new LinkedHashSet<BaseColumnInfo>(old_dep.getBaseCols()); + bci_set.addAll(dep.getBaseCols()); + old_dep.setBaseCols(new ArrayList<BaseColumnInfo>(bci_set)); + // TODO: Fix the expressions later. + old_dep.setExpr(null); + } + } + } + + /** + * The map contains an index from the (operator, columnInfo) to the + * dependency vector for that tuple. This is used to generate the + * dependency vectors during the walk of the operator tree. + */ + private final Index index; + + /** + * Parse context to get to the table metadata information. + */ + private final ParseContext pctx; + + /** + * Constructor. + * + * @param pctx The parse context that is used to get table metadata information. + */ + public LineageCtx(ParseContext pctx) { + index = new Index(); + this.pctx = pctx; + } + + /** + * Gets the parse context. + * + * @return ParseContext + */ + public ParseContext getParseCtx() { + return pctx; + } + + /** + * Gets the dependency index. + * + * @return Index + */ + public Index getIndex() { + return index; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/Generator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/Generator.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/Generator.java (revision 0) @@ -0,0 +1,82 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.optimizer.lineage; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; +import org.apache.hadoop.hive.ql.lib.Dispatcher; +import org.apache.hadoop.hive.ql.lib.GraphWalker; +import org.apache.hadoop.hive.ql.lib.Node; +import org.apache.hadoop.hive.ql.lib.NodeProcessor; +import org.apache.hadoop.hive.ql.lib.PreOrderWalker; +import org.apache.hadoop.hive.ql.lib.Rule; +import org.apache.hadoop.hive.ql.lib.RuleRegExp; +import org.apache.hadoop.hive.ql.optimizer.Transform; +import org.apache.hadoop.hive.ql.parse.ParseContext; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.session.SessionState; + +/** + * This class generates the lineage information for the columns + * and tables from the plan before it goes through other + * optimization phases. + */ +public class Generator implements Transform { + + /* (non-Javadoc) + * @see org.apache.hadoop.hive.ql.optimizer.Transform#transform(org.apache.hadoop.hive.ql.parse.ParseContext) + */ + @Override + public ParseContext transform(ParseContext pctx) throws SemanticException { + + // Create the lineage context + LineageCtx lCtx = new LineageCtx(pctx); + + Map opRules = new LinkedHashMap(); + opRules.put(new RuleRegExp("R1", "TS%"), OpProcFactory.getTSProc()); + opRules.put(new RuleRegExp("R2", "SCR%"), OpProcFactory.getTransformProc()); + opRules.put(new RuleRegExp("R3", "UDTF%"), OpProcFactory.getTransformProc()); + opRules.put(new RuleRegExp("R4", "SEL%"), OpProcFactory.getSelProc()); + opRules.put(new RuleRegExp("R5", "GBY%"), OpProcFactory.getGroupByProc()); + opRules.put(new RuleRegExp("R6", "UNION%"), OpProcFactory.getUnionProc()); + opRules.put(new RuleRegExp("R7", "JOIN%|MAPJOIN%"), OpProcFactory.getJoinProc()); + opRules.put(new RuleRegExp("R8", "RS%"), OpProcFactory.getReduceSinkProc()); + opRules.put(new RuleRegExp("R9", "LVJ%"), OpProcFactory.getLateralViewJoinProc()); + + // The dispatcher fires the processor corresponding to the closest matching rule and passes the context along + Dispatcher disp = new DefaultRuleDispatcher(OpProcFactory.getDefaultProc(), opRules, lCtx); + GraphWalker ogw = new PreOrderWalker(disp); + + // Create a list of topop nodes + ArrayList topNodes = new ArrayList(); + topNodes.addAll(pctx.getTopOps().values()); + ogw.startWalking(topNodes, null); + + // Transfer the index from the lineage context to the session state. + if (SessionState.get() != null) { + SessionState.get().getLineageState().setIndex(lCtx.getIndex()); + } + + return pctx; + } + +} Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java (revision 0) @@ -0,0 +1,209 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.optimizer.lineage; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Stack; + +import org.apache.hadoop.hive.ql.exec.ColumnInfo; +import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.hooks.LineageInfo; +import org.apache.hadoop.hive.ql.hooks.LineageInfo.BaseColumnInfo; +import org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency; +import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; +import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; +import org.apache.hadoop.hive.ql.lib.Dispatcher; +import org.apache.hadoop.hive.ql.lib.GraphWalker; +import org.apache.hadoop.hive.ql.lib.Node; +import org.apache.hadoop.hive.ql.lib.NodeProcessor; +import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.lib.Rule; +import org.apache.hadoop.hive.ql.lib.RuleRegExp; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc; + +/** + * Expression processor factory for lineage. Each processor is responsible to + * create the leaf level column info objects that the expression depends upon + * and also generates a string representation of the expression. + */ +public class ExprProcFactory { + + /** + * Processor for column expressions. + */ + public static class ColumnExprProcessor implements NodeProcessor { + + @Override + public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + ExprNodeColumnDesc cd = (ExprNodeColumnDesc) nd; + ExprProcCtx epc = (ExprProcCtx) procCtx; + + // assert that the input operator is not null as there are no + // exprs associated with table scans. + assert (epc.getInputOperator() != null); + + ColumnInfo inp_ci = null; + for (ColumnInfo tmp_ci : epc.getInputOperator().getSchema() + .getSignature()) { + if (tmp_ci.getInternalName().equals(cd.getColumn())) { + inp_ci = tmp_ci; + break; + } + } + + // Insert the dependencies of inp_ci to that of the current operator, ci + LineageCtx lc = epc.getLineageCtx(); + Dependency dep = lc.getIndex().getDependency(epc.getInputOperator(), inp_ci); + + return dep; + } + + } + + /** + * Processor for any function or field expression. 
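As a side note, the column lookup in ColumnExprProcessor above is a straight scan of the input operator's row signature by internal name. A toy, self-contained version of that lookup, with plain strings standing in for Hive's ColumnInfo (the list contents and the helper name findByInternalName are invented for illustration):

    import java.util.Arrays;
    import java.util.List;

    class SchemaLookup {
      // Mirrors the loop above: scan the input operator's signature for a
      // column whose internal name matches the column expression.
      static String findByInternalName(List<String> signature, String internalName) {
        for (String name : signature) {
          if (name.equals(internalName)) {
            return name;
          }
        }
        return null; // no matching column in the input schema
      }

      public static void main(String[] args) {
        List<String> sig = Arrays.asList("_col0", "_col1", "key", "value");
        System.out.println(findByInternalName(sig, "key")); // prints "key"
      }
    }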
+ */ + public static class GenericExprProcessor implements NodeProcessor { + + @Override + public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + assert (nd instanceof ExprNodeGenericFuncDesc || nd instanceof ExprNodeFieldDesc); + + // Concatenate the dependencies of all the children to compute the new + // dependency. + Dependency dep = new Dependency(); + + LinkedHashSet bci_set = new LinkedHashSet(); + dep.setType(LineageInfo.DependencyType.UDF); + + for (Object child : nodeOutputs) { + if (child == null) { + continue; + } + + Dependency child_dep = (Dependency) child; + if (child_dep.getType() != LineageInfo.DependencyType.UDF) { + dep.setType(child_dep.getType()); + } + bci_set.addAll(child_dep.getBaseCols()); + } + + dep.setBaseCols(new ArrayList(bci_set)); + + return dep; + } + + } + + /** + * Processor for constants and null expressions. For such expressions the + * processor simply returns a null dependency vector. + */ + public static class DefaultExprProcessor implements NodeProcessor { + + @Override + public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + assert (nd instanceof ExprNodeConstantDesc || nd instanceof ExprNodeNullDesc); + + // Create a dependency that has no basecols + Dependency dep = new Dependency(); + dep.setType(LineageInfo.DependencyType.SIMPLE); + dep.setBaseCols(new ArrayList()); + return dep; + } + } + + public static NodeProcessor getDefaultExprProcessor() { + return new DefaultExprProcessor(); + } + + public static NodeProcessor getGenericFuncProcessor() { + return new GenericExprProcessor(); + } + + public static NodeProcessor getFieldProcessor() { + return new GenericExprProcessor(); + } + + public static NodeProcessor getColumnProcessor() { + return new ColumnExprProcessor(); + } + + /** + * Gets the expression dependencies for the expression. + * + * @param lctx + * The lineage context containing the input operators dependencies. + * @param inpOp + * The input operator to the current operator. + * @param expr + * The expression that is being processed. + * @throws SemanticException + */ + public static Dependency getExprDependency(LineageCtx lctx, + Operator inpOp, ExprNodeDesc expr) + throws SemanticException { + + // Create the walker, the rules dispatcher and the context. + ExprProcCtx exprCtx = new ExprProcCtx(lctx, inpOp); + + // create a walker which walks the tree in a DFS manner while maintaining + // the operator stack. 
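The effect of the walk that getExprDependency sets up below, stripped of the walker machinery, is a post-order union of base columns over the expression tree: column references contribute themselves, function and field nodes union their children's contributions. A minimal sketch under that reading (Expr, ColRef and Func are invented toy classes, not Hive types):

    import java.util.ArrayList;
    import java.util.LinkedHashSet;
    import java.util.List;
    import java.util.Set;

    abstract class Expr {
      abstract Set<String> baseCols(); // columns this expression ultimately reads
    }

    class ColRef extends Expr {
      private final String name;
      ColRef(String name) { this.name = name; }
      Set<String> baseCols() {
        Set<String> s = new LinkedHashSet<String>();
        s.add(name); // a column reference contributes itself
        return s;
      }
    }

    class Func extends Expr {
      private final List<Expr> children = new ArrayList<Expr>();
      Func add(Expr e) { children.add(e); return this; }
      Set<String> baseCols() {
        // a function depends on the union of its children's base columns
        Set<String> s = new LinkedHashSet<String>();
        for (Expr c : children) {
          s.addAll(c.baseCols());
        }
        return s;
      }
    }

    class ExprDemo {
      public static void main(String[] args) {
        // Models T2.c1 = T1.c1 + T3.c1 from the DependencyType examples.
        Func plus = new Func().add(new ColRef("t1.c1")).add(new ColRef("t3.c1"));
        System.out.println(plus.baseCols()); // [t1.c1, t3.c1]
      }
    }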
The dispatcher + // generates the plan from the operator tree + Map exprRules = new LinkedHashMap(); + exprRules.put( + new RuleRegExp("R1", ExprNodeColumnDesc.class.getName() + "%"), + getColumnProcessor()); + exprRules.put( + new RuleRegExp("R2", ExprNodeFieldDesc.class.getName() + "%"), + getFieldProcessor()); + exprRules.put(new RuleRegExp("R3", ExprNodeGenericFuncDesc.class.getName() + + "%"), getGenericFuncProcessor()); + + // The dispatcher fires the processor corresponding to the closest matching + // rule and passes the context along + Dispatcher disp = new DefaultRuleDispatcher(getDefaultExprProcessor(), + exprRules, exprCtx); + GraphWalker egw = new DefaultGraphWalker(disp); + + List startNodes = new ArrayList(); + startNodes.add(expr); + + HashMap outputMap = new HashMap(); + egw.startWalking(startNodes, outputMap); + return (Dependency)outputMap.get(expr); + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java (revision 0) @@ -0,0 +1,512 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.optimizer.lineage; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; +import java.util.Stack; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.ql.exec.ColumnInfo; +import org.apache.hadoop.hive.ql.exec.ForwardOperator; +import org.apache.hadoop.hive.ql.exec.GroupByOperator; +import org.apache.hadoop.hive.ql.exec.JoinOperator; +import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator; +import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; +import org.apache.hadoop.hive.ql.exec.RowSchema; +import org.apache.hadoop.hive.ql.exec.SelectOperator; +import org.apache.hadoop.hive.ql.exec.TableScanOperator; +import org.apache.hadoop.hive.ql.hooks.LineageInfo; +import org.apache.hadoop.hive.ql.hooks.LineageInfo.BaseColumnInfo; +import org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency; +import org.apache.hadoop.hive.ql.hooks.LineageInfo.TableAliasInfo; +import org.apache.hadoop.hive.ql.lib.Node; +import org.apache.hadoop.hive.ql.lib.NodeProcessor; +import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.lib.Utils; +import org.apache.hadoop.hive.ql.parse.ParseContext; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.AggregationDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; +import org.apache.hadoop.hive.ql.plan.JoinDesc; +import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc; + +/** + * Operator factory for the rule processors for lineage. + */ +public class OpProcFactory { + + /** + * Returns the parent operator in the walk path to the current operator. + * + * @param stack The stack encoding the path. + * + * @return Operator The parent operator in the current path. + */ + protected static Operator getParent(Stack stack) { + return (Operator)Utils.getNthAncestor(stack, 1); + } + + /** + * Processor for Script and UDTF Operators. + */ + public static class TransformLineage extends DefaultLineage implements NodeProcessor { + + @Override + public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + // LineageCTx + LineageCtx lCtx = (LineageCtx) procCtx; + + // The operators + Operator op = (Operator)nd; + Operator inpOp = getParent(stack); + + // Create a single dependency list by concatenating the dependencies of all + // the cols + Dependency dep = new Dependency(); + dep.setType(LineageInfo.DependencyType.SCRIPT); + // TODO: Fix this to a non null value. + dep.setExpr(null); + + LinkedHashSet col_set = new LinkedHashSet(); + for(ColumnInfo ci : inpOp.getSchema().getSignature()) { + Dependency d = lCtx.getIndex().getDependency(inpOp, ci); + if (d != null) { + col_set.addAll(d.getBaseCols()); + } + } + + dep.setBaseCols(new ArrayList(col_set)); + + // This dependency is then set for all the colinfos of the script operator + for(ColumnInfo ci : op.getSchema().getSignature()) { + lCtx.getIndex().putDependency(op, ci, dep); + } + + return null; + } + + } + + /** + * Processor for TableScan Operator. This actually creates the base column mappings. 
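Before the TableScanLineage code below, it may help to see the seeding step in isolation: at a table scan, every output column receives a SIMPLE dependency on exactly one base column of the scanned table. A self-contained sketch, with plain strings standing in for ColumnInfo/BaseColumnInfo and "src" as a hypothetical table alias:

    import java.util.LinkedHashMap;
    import java.util.Map;

    class SeedLineage {
      public static void main(String[] args) {
        // One base column per output column of the scan; position-aligned.
        String[] schema = {"key", "value"};
        Map<String, String> deps = new LinkedHashMap<String, String>();
        for (String col : schema) {
          deps.put(col, "src." + col); // "src" plays the role of the table alias
        }
        System.out.println(deps); // {key=src.key, value=src.value}
      }
    }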
*/ + public static class TableScanLineage extends DefaultLineage implements NodeProcessor { + + @Override + public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + // LineageCtx + LineageCtx lCtx = (LineageCtx) procCtx; + ParseContext pctx = lCtx.getParseCtx(); + + // Table scan operator. + TableScanOperator top = (TableScanOperator)nd; + org.apache.hadoop.hive.ql.metadata.Table t = pctx.getTopToTable().get(top); + Table tab = t.getTTable(); + + // Generate the mappings + RowSchema rs = top.getSchema(); + List<FieldSchema> cols = t.getAllCols(); + TableAliasInfo tai = new TableAliasInfo(); + tai.setAlias(top.getConf().getAlias()); + tai.setTable(tab); + int cnt = 0; + for(ColumnInfo ci : rs.getSignature()) { + // Create a dependency + Dependency dep = new Dependency(); + BaseColumnInfo bci = new BaseColumnInfo(); + bci.setTabAlias(tai); + bci.setColumn(cols.get(cnt++)); + + // Populate the dependency + dep.setType(LineageInfo.DependencyType.SIMPLE); + // TODO: Find out how to get the expression here. + dep.setExpr(null); + dep.setBaseCols(new ArrayList<BaseColumnInfo>()); + dep.getBaseCols().add(bci); + + // Put the dependency in the map + lCtx.getIndex().putDependency(top, ci, dep); + } + + return null; + } + + } + + /** + * Processor for Join Operator. + */ + public static class JoinLineage extends DefaultLineage implements NodeProcessor { + @Override + public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + // Assert that there is at least one item in the stack. This should never + // be called for leaves. + assert(!stack.isEmpty()); + + // LineageCtx + LineageCtx lCtx = (LineageCtx) procCtx; + JoinOperator op = (JoinOperator)nd; + JoinDesc jd = op.getConf(); + + // The input operator to the join is always a reduce sink operator + ReduceSinkOperator inpOp = (ReduceSinkOperator)getParent(stack); + ReduceSinkDesc rd = inpOp.getConf(); + int tag = rd.getTag(); + + // Iterate over the outputs of the join operator and merge the + // dependencies of the columns that correspond to the tag. + int cnt = 0; + List<ExprNodeDesc> exprs = jd.getExprs().get((byte)tag); + for(ColumnInfo ci : op.getSchema().getSignature()) { + if (jd.getReversedExprs().get(ci.getInternalName()) != tag) { + continue; + } + + // Otherwise look up the expression corresponding to this ci + ExprNodeDesc expr = exprs.get(cnt++); + lCtx.getIndex().mergeDependency(op, ci, + ExprProcFactory.getExprDependency(lCtx, inpOp, expr)); + } + + return null; + } + + } + + /** + * Processor for Lateral View Join Operator. + */ + public static class LateralViewJoinLineage extends DefaultLineage implements NodeProcessor { + @Override + public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + // Assert that there is at least one item in the stack. This should never + // be called for leaves. + assert(!stack.isEmpty()); + + // LineageCtx + LineageCtx lCtx = (LineageCtx) procCtx; + LateralViewJoinOperator op = (LateralViewJoinOperator)nd; + boolean isUdtfPath = true; + Operator<? extends Serializable> inpOp = getParent(stack); + ArrayList<ColumnInfo> cols = inpOp.getSchema().getSignature(); + + if (inpOp instanceof SelectOperator) { + isUdtfPath = false; + } + + // Dirty hack!! + // For the select path the columns are the ones at the end of the + // current operator's schema and for the udtf path the columns are + // at the beginning of the operator schema.
+ ArrayList out_cols = op.getSchema().getSignature(); + int out_cols_size = out_cols.size(); + int cols_size = cols.size(); + if (isUdtfPath) { + int cnt = 0; + while (cnt < cols_size) { + lCtx.getIndex().mergeDependency(op, out_cols.get(cnt), + lCtx.getIndex().getDependency(inpOp, cols.get(cnt))); + cnt++; + } + } + else { + int cnt = cols_size - 1; + while (cnt >= 0) { + lCtx.getIndex().mergeDependency(op, out_cols.get(out_cols_size - cols_size + cnt), + lCtx.getIndex().getDependency(inpOp, cols.get(cnt))); + cnt--; + } + } + return null; + } + + } + + /** + * Processor for Select operator. + */ + public static class SelectLineage extends DefaultLineage implements NodeProcessor { + @Override + public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + LineageCtx lctx = (LineageCtx)procCtx; + SelectOperator sop = (SelectOperator)nd; + + // if this is a selStarNoCompute then this select operator + // is treated like a default operator, so just call the super classes + // process method. + if (sop.getConf().isSelStarNoCompute()) { + return super.process(nd, stack, procCtx, nodeOutputs); + } + + // Otherwise we treat this as a normal select operator and look at + // the expressions. + + ArrayList col_infos = sop.getSchema().getSignature(); + int cnt = 0; + for(ExprNodeDesc expr : sop.getConf().getColList()) { + lctx.getIndex().putDependency(sop, col_infos.get(cnt++), + ExprProcFactory.getExprDependency(lctx, getParent(stack), expr)); + } + + return null; + } + + } + + /** + * Processor for GroupBy operator. + */ + public static class GroupByLineage extends DefaultLineage implements NodeProcessor { + @Override + public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + LineageCtx lctx = (LineageCtx)procCtx; + GroupByOperator gop = (GroupByOperator)nd; + ArrayList col_infos = gop.getSchema().getSignature(); + Operator inpOp = getParent(stack); + int cnt = 0; + + for(ExprNodeDesc expr : gop.getConf().getKeys()) { + lctx.getIndex().putDependency(gop, col_infos.get(cnt++), + ExprProcFactory.getExprDependency(lctx, inpOp, expr)); + } + + for(AggregationDesc agg : gop.getConf().getAggregators()) { + // Concatenate the dependencies of all the parameters to + // create the new dependency + Dependency dep = new Dependency(); + dep.setType(LineageInfo.DependencyType.UDAF); + // TODO: Get the actual string here. + dep.setExpr(null); + LinkedHashSet bci_set = new LinkedHashSet(); + for(ExprNodeDesc expr : agg.getParameters()) { + Dependency expr_dep = ExprProcFactory.getExprDependency(lctx, inpOp, expr); + if (expr_dep != null) { + bci_set.addAll(expr_dep.getBaseCols()); + } + } + + // If the bci_set is empty, this means that the inputs to this + // aggregate function were all constants (e.g. count(1)). In this case + // the aggregate function is just dependent on all the tables that are in + // the dependency list of the input operator. + if (bci_set.isEmpty()) { + Set tai_set = new LinkedHashSet(); + for(ColumnInfo ci : inpOp.getSchema().getSignature()) { + Dependency inp_dep = lctx.getIndex().getDependency(inpOp, ci); + // The dependency can be null as some of the input cis may not have + // been set in case of joins. 
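The count(1) fallback described in the comment above can be pictured with a tiny model: when the aggregate's parameters contribute no base columns, a (table alias, null column) pair is recorded per input table, the null column meaning "the row of the table as a whole". A sketch with invented names:

    import java.util.ArrayList;
    import java.util.List;

    class WholeTableDep {
      static class BaseCol {
        final String tabAlias;
        final String column; // null means "the whole row of the table"
        BaseCol(String tabAlias, String column) {
          this.tabAlias = tabAlias;
          this.column = column;
        }
      }

      public static void main(String[] args) {
        List<BaseCol> baseCols = new ArrayList<BaseCol>();
        baseCols.add(new BaseCol("src", null)); // count(1) depends on src as a whole
        System.out.println(baseCols.size()); // 1
      }
    }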
+ if (inp_dep != null) { + for(BaseColumnInfo bci : inp_dep.getBaseCols()) { + tai_set.add(bci.getTabAlias()); + } + } + } + + // Create the BaseColumnInfos and set them in the bci_set + for(TableAliasInfo tai : tai_set) { + BaseColumnInfo bci = new BaseColumnInfo(); + bci.setTabAlias(tai); + // This is set to null to reflect that the dependency is not on any + // particular column of the table. + bci.setColumn(null); + bci_set.add(bci); + } + } + + dep.setBaseCols(new ArrayList<BaseColumnInfo>(bci_set)); + lctx.getIndex().putDependency(gop, col_infos.get(cnt++), dep); + } + + return null; + } + + } + + /** + * Union processor. + * In this case we call mergeDependency as opposed to putDependency + * in order to account for visits from different parents. + */ + public static class UnionLineage extends DefaultLineage implements NodeProcessor { + + protected static final Log LOG = LogFactory.getLog(OpProcFactory.class.getName()); + + @SuppressWarnings("unchecked") + @Override + public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + // Assert that there is at least one item in the stack. This should never + // be called for leaves. + assert(!stack.isEmpty()); + + // LineageCtx + LineageCtx lCtx = (LineageCtx) procCtx; + Operator<? extends Serializable> op = (Operator<? extends Serializable>)nd; + + // Get the row schema of the input (parent) operator. + Operator<? extends Serializable> inpOp = getParent(stack); + RowSchema rs = op.getSchema(); + ArrayList<ColumnInfo> inp_cols = inpOp.getSchema().getSignature(); + int cnt = 0; + for(ColumnInfo ci : rs.getSignature()) { + lCtx.getIndex().mergeDependency(op, ci, + lCtx.getIndex().getDependency(inpOp, inp_cols.get(cnt++))); + } + return null; + } + } + + /** + * ReduceSink processor. + */ + public static class ReduceSinkLineage implements NodeProcessor { + + protected static final Log LOG = LogFactory.getLog(OpProcFactory.class.getName()); + + @SuppressWarnings("unchecked") + @Override + public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + // Assert that there is at least one item in the stack. This should never + // be called for leaves. + assert(!stack.isEmpty()); + + // LineageCtx + LineageCtx lCtx = (LineageCtx) procCtx; + ReduceSinkOperator rop = (ReduceSinkOperator)nd; + + ArrayList<ColumnInfo> col_infos = rop.getSchema().getSignature(); + Operator<? extends Serializable> inpOp = getParent(stack); + int cnt = 0; + + // The keys are included only in case the reduce sink feeds into + // a group by operator through a chain of forward operators + Operator<? extends Serializable> op = rop.getChildOperators().get(0); + while (op instanceof ForwardOperator) { + op = op.getChildOperators().get(0); + } + + if (op instanceof GroupByOperator) { + for(ExprNodeDesc expr : rop.getConf().getKeyCols()) { + lCtx.getIndex().putDependency(rop, col_infos.get(cnt++), + ExprProcFactory.getExprDependency(lCtx, inpOp, expr)); + } + } + + for(ExprNodeDesc expr : rop.getConf().getValueCols()) { + lCtx.getIndex().putDependency(rop, col_infos.get(cnt++), + ExprProcFactory.getExprDependency(lCtx, inpOp, expr)); + } + + return null; + } + } + + /** + * Default processor. This basically passes the input dependencies as such + * to the output dependencies. + */ + public static class DefaultLineage implements NodeProcessor { + + protected static final Log LOG = LogFactory.getLog(OpProcFactory.class.getName()); + + @SuppressWarnings("unchecked") + @Override + public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, + Object...
nodeOutputs) throws SemanticException { + // Assert that there is at least one item in the stack. This should never + // be called for leaves. + assert(!stack.isEmpty()); + + // LineageCtx + LineageCtx lCtx = (LineageCtx) procCtx; + Operator<? extends Serializable> op = (Operator<? extends Serializable>)nd; + + // Get the row schema of the input (parent) operator. + Operator<? extends Serializable> inpOp = getParent(stack); + RowSchema rs = op.getSchema(); + ArrayList<ColumnInfo> inp_cols = inpOp.getSchema().getSignature(); + int cnt = 0; + for(ColumnInfo ci : rs.getSignature()) { + lCtx.getIndex().putDependency(op, ci, + lCtx.getIndex().getDependency(inpOp, inp_cols.get(cnt++))); + } + return null; + } + } + + public static NodeProcessor getJoinProc() { + return new JoinLineage(); + } + + public static NodeProcessor getLateralViewJoinProc() { + return new LateralViewJoinLineage(); + } + + public static NodeProcessor getTSProc() { + return new TableScanLineage(); + } + + public static NodeProcessor getTransformProc() { + return new TransformLineage(); + } + + public static NodeProcessor getSelProc() { + return new SelectLineage(); + } + + public static NodeProcessor getGroupByProc() { + return new GroupByLineage(); + } + + public static NodeProcessor getUnionProc() { + return new UnionLineage(); + } + + public static NodeProcessor getReduceSinkProc() { + return new ReduceSinkLineage(); + } + + public static NodeProcessor getDefaultProc() { + return new DefaultLineage(); + } + +} Index: ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (revision 927279) +++ ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (working copy) @@ -41,7 +41,7 @@ /** * SessionState encapsulates common data associated with a session. - * + * * Also provides support for a thread static session object that can be accessed * from any point in the code to interact with the user and to retrieve * configuration information @@ -74,6 +74,20 @@ */ private String commandType; + /** + * Lineage state. + */ + LineageState ls; + + /** + * Get the lineage state stored in this session. + * + * @return LineageState + */ + public LineageState getLineageState() { + return ls; + } + public HiveConf getConf() { return conf; } @@ -96,7 +110,7 @@ public SessionState(HiveConf conf) { this.conf = conf; - + ls = new LineageState(); } public void setCmd(String cmdString) { @@ -117,7 +131,7 @@ /** * Singleton Session object per thread. - * + * **/ private static ThreadLocal<SessionState> tss = new ThreadLocal<SessionState>(); @@ -161,7 +175,7 @@ /** * get hiveHitsory object which does structured logging. - * + * * @return The hive history object */ public HiveHistory getHiveHistory() { @@ -196,11 +210,11 @@ * This class provides helper routines to emit informational and error * messages to the user and log4j files while obeying the current session's * verbosity levels. - * + * * NEVER write directly to the SessionStates standard output other than to * emit result data DO use printInfo and printError provided by LogHelper to * emit non result data strings. - * + * * It is perfectly acceptable to have global static LogHelper objects (for * example - once per module) LogHelper always emits info/error to current * session as required.
Index: ql/src/java/org/apache/hadoop/hive/ql/session/LineageState.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/session/LineageState.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/session/LineageState.java (revision 0) @@ -0,0 +1,118 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.session; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.ql.exec.ColumnInfo; +import org.apache.hadoop.hive.ql.exec.FileSinkOperator; +import org.apache.hadoop.hive.ql.hooks.LineageInfo; +import org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer; +import org.apache.hadoop.hive.ql.optimizer.lineage.LineageCtx.Index; + +/** + * LineageState. Contains all the information used to generate the + * lineage information for the post execution hooks. + * + */ +public class LineageState { + + /** + * Mapping from the directory name to FileSinkOperator. This + * mapping is generated at the filesink operator creation + * time and is then later used to create the mapping from + * movetask to the set of filesink operators. + */ + private final Map<String, FileSinkOperator> dirToFop; + + /** + * The lineage context index for this query. + */ + private Index index; + + /** + * The lineage info structure that is used to pass the lineage + * information to the hooks. + */ + private final LineageInfo linfo; + + /** + * Constructor. + */ + public LineageState() { + dirToFop = new HashMap<String, FileSinkOperator>(); + linfo = new LineageInfo(); + } + + /** + * Adds a mapping from the load work to the file sink operator. + * + * @param dir The directory name. + * @param fop The file sink operator. + */ + public void mapDirToFop(String dir, FileSinkOperator fop) { + dirToFop.put(dir, fop); + } + + /** + * Set the lineage information for the associated directory. + * + * @param dir The directory containing the query results. + * @param dc The associated data container. + * @param cols The list of columns. + */ + public void setLineage(String dir, DataContainer dc, + List<FieldSchema> cols) { + // First lookup the file sink operator from the load work. + FileSinkOperator fop = dirToFop.get(dir); + + // Go over the associated fields and look up the dependencies + // by position in the row schema of the filesink operator. + if (fop == null) { + return; + } + + List<ColumnInfo> signature = fop.getSchema().getSignature(); + int i = 0; + for (FieldSchema fs : cols) { + linfo.putDependency(dc, fs, index.getDependency(fop, signature.get(i++))); + } + } + + /** + * Gets the lineage information. + * + * @return LineageInfo.
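To see how the pieces of this class fit together: at compile time the semantic analyzer calls mapDirToFop for each file sink's target directory, and at execution time MoveTask calls setLineage with the same directory to resolve the sink and record lineage (both call sites appear later in this patch). Below is a minimal analogue of the directory-to-sink map only, with strings standing in for FileSinkOperator and made-up literals:

    import java.util.HashMap;
    import java.util.Map;

    class DirToSink {
      // The planner registers the sink under its scratch directory; the move
      // step looks it up later by the same directory name.
      private final Map<String, String> dirToFop = new HashMap<String, String>();

      void mapDirToFop(String dir, String fop) { dirToFop.put(dir, fop); }

      String resolve(String dir) { return dirToFop.get(dir); } // null if unknown, as in setLineage

      public static void main(String[] args) {
        DirToSink s = new DirToSink();
        s.mapDirToFop("/tmp/scratch/10000", "FS_4"); // hypothetical dir and sink name
        System.out.println(s.resolve("/tmp/scratch/10000")); // FS_4
      }
    }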
+ */ + public LineageInfo getLineageInfo() { + return linfo; + } + + /** + * Sets the index for the lineage state. + * + * @param index The index derived from lineage context. + */ + public void setIndex(Index index) { + this.index = index; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageInfo.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageInfo.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageInfo.java (revision 0) @@ -0,0 +1,406 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.hooks; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.Table; + +/** + * This class contains the lineage information that is passed + * to the post-execution hooks. + */ +public class LineageInfo implements Serializable { + + /** + * Serial version id. + */ + private static final long serialVersionUID = 1L; + + /** + * Enum to track dependency. This enum has the following values: + * 1. SIMPLE - Indicates that the column is derived from another table column + * with no transformations e.g. T2.c1 = T1.c1. + * 2. UDF - Indicates that the column is derived from a UDF on + * columns on other tables e.g. T2.c1 = T1.c1 + T3.c1. + * 3. UDAF - Indicates that the column is derived from a UDAF on + * other columns. e.g. T2.c1 = sum(T1.c1) group by T1.c2. + * 4. UDTF - Indicates that the column is derived from a UDTF on + * other columns e.g. a column produced by explode(T1.c1). + * 5. SCRIPT - Indicates that the column is derived from the output + * of a user script through a TRANSFORM, MAP or REDUCE syntax. + * 6. SET - Indicates that the column is derived from SQL set operations + * e.g. UNION ALL. + */ + public static enum DependencyType { + SIMPLE, UDF, UDAF, UDTF, SCRIPT, SET + } + + /** + * Table or Partition data container. We need this class because the output + * of the query can either go to a table or a partition within a table. The + * data container class subsumes both of these. + */ + public static class DataContainer implements Serializable { + + /** + * Serial version id. + */ + private static final long serialVersionUID = 1L; + + /** + * The table in case this container is a table. + */ + private final Table tab; + + /** + * The partition in case this container is a partition. + */ + private final Partition part; + + /** + * Constructor for non-partitioned tables. + * + * @param tab The associated table. + */ + public DataContainer(Table tab) { + this.tab = tab; + this.part = null; + } + + /** + * Constructor for a partitioned table's partition.
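A compact model of the table-or-partition choice that DataContainer encodes, with plain strings in place of the metastore Table and Partition objects (the table name and partition spec below are invented):

    class Container {
      // Mirrors DataContainer: exactly one of the two shapes.
      final String table;
      final String partition; // null for a non-partitioned target

      Container(String table) { this(table, null); }
      Container(String table, String partition) {
        this.table = table;
        this.partition = partition;
      }

      boolean isPartition() { return partition != null; }

      public static void main(String[] args) {
        Container t = new Container("dest1");
        Container p = new Container("dest1", "ds=2010-03-25"); // hypothetical spec
        System.out.println(t.isPartition() + " " + p.isPartition()); // false true
      }
    }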
+ * + * @param part The associated partition. + */ + public DataContainer(Table tab, Partition part) { + this.tab = tab; + this.part = part; + } + + /** + * Returns true in case this data container is a partition. + * + * @return boolean TRUE if the container is a table partition. + */ + public boolean isPartition() { + return (part != null); + } + + public Table getTable() { + return this.tab; + } + + public Partition getPartition() { + return this.part; + } + } + + /** + * Class that captures the lookup key for the dependency. The dependency + * is from (DataContainer, FieldSchema) to a Dependency structure. This + * class captures the (DataContainer, FieldSchema) tuple. + */ + public static class DependencyKey implements Serializable { + + /** + * Serial version id. + */ + private static final long serialVersionUID = 1L; + + /** + * The data container for this key. + */ + private final DataContainer dc; + + /** + * The field schema for this key. + */ + private final FieldSchema fld; + + /** + * Constructor. + * + * @param dc The associated data container. + * @param fld The associated field schema. + */ + public DependencyKey(DataContainer dc, FieldSchema fld) { + this.dc = dc; + this.fld = fld; + } + + public DataContainer getDataContainer() { + return this.dc; + } + + public FieldSchema getFieldSchema() { + return this.fld; + } + + /* (non-Javadoc) + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((dc == null) ? 0 : dc.hashCode()); + result = prime * result + ((fld == null) ? 0 : fld.hashCode()); + return result; + } + + /* (non-Javadoc) + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + DependencyKey other = (DependencyKey) obj; + if (dc != other.dc) { + return false; + } + if (fld != other.fld) { + return false; + } + return true; + } + } + + /** + * Base Column information. + */ + public static class BaseColumnInfo implements Serializable { + + /** + * Serial version id. + */ + private static final long serialVersionUID = 1L; + + /** + * The table and alias info encapsulated in a different class. + */ + private TableAliasInfo tabAlias; + + /** + * The metastore column information. The column can be null + * and that denotes that the expression is dependent on the row + * of the table and not a particular column. This can happen in case + * of count(1). + */ + private FieldSchema column; + + /** + * @return the tabAlias + */ + public TableAliasInfo getTabAlias() { + return tabAlias; + } + + /** + * @param tabAlias the tabAlias to set + */ + public void setTabAlias(TableAliasInfo tabAlias) { + this.tabAlias = tabAlias; + } + + /** + * @return the column + */ + public FieldSchema getColumn() { + return column; + } + + /** + * @param column the column to set + */ + public void setColumn(FieldSchema column) { + this.column = column; + } + } + + public static class TableAliasInfo implements Serializable { + + /** + * Serial version id. + */ + private static final long serialVersionUID = 1L; + + /** + * The alias for the table. + */ + private String alias; + + /** + * The metastore table information.
+ */ + private Table table; + + /** + * @return the alias + */ + public String getAlias() { + return alias; + } + + /** + * @param alias the alias to set + */ + public void setAlias(String alias) { + this.alias = alias; + } + + /** + * @return the table + */ + public Table getTable() { + return table; + } + + /** + * @param table the table to set + */ + public void setTable(Table table) { + this.table = table; + } + } + + /** + * This class tracks the dependency information for the base column. + */ + public static class Dependency implements Serializable { + + /** + * Serial version id. + */ + private static final long serialVersionUID = 1L; + + /** + * The type of dependency. + */ + private DependencyType type; + + /** + * Expression string for the dependency. + */ + private String expr; + + /** + * The list of base columns that the particular column depends on. + */ + private List<BaseColumnInfo> baseCols; + + /** + * @return the type + */ + public DependencyType getType() { + return type; + } + + /** + * @param type the type to set + */ + public void setType(DependencyType type) { + this.type = type; + } + + /** + * @return the expr + */ + public String getExpr() { + return expr; + } + + /** + * @param expr the expr to set + */ + public void setExpr(String expr) { + this.expr = expr; + } + + /** + * @return the baseCols + */ + public List<BaseColumnInfo> getBaseCols() { + return baseCols; + } + + /** + * @param baseCols the baseCols to set + */ + public void setBaseCols(List<BaseColumnInfo> baseCols) { + this.baseCols = baseCols; + } + } + + /** + * The map contains an index from the (data container, column name) to the + * dependency vector for that tuple. This is used to generate the + * dependency vectors during the walk of the operator tree. + */ + protected Map<DependencyKey, Dependency> index; + + /** + * Constructor. + */ + public LineageInfo() { + index = new HashMap<DependencyKey, Dependency>(); + } + + /** + * Gets the dependency for a table, column tuple. + * @param dc The data container of the column whose dependency is being inspected. + * @param col The column whose dependency is being inspected. + * @return Dependency for that particular table, column tuple. + * null if no dependency is found. + */ + public Dependency getDependency(DataContainer dc, FieldSchema col) { + return index.get(new DependencyKey(dc, col)); + } + + /** + * Puts the dependency for a table, column tuple. + * @param dc The datacontainer whose dependency is being inserted. + * @param col The column whose dependency is being inserted. + * @param dep The dependency. + */ + public void putDependency(DataContainer dc, FieldSchema col, Dependency dep) { + index.put(new DependencyKey(dc, col), dep); + } + + /** + * Gets the iterator on this structure. + * + * @return Iterator over the (DependencyKey, Dependency) entries. + */ + public Iterator<Map.Entry<DependencyKey, Dependency>> iterator() { + return index.entrySet().iterator(); + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecute.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecute.java (revision 927279) +++ ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecute.java (working copy) @@ -31,17 +31,20 @@ /** * The run command that is called just before the execution of the query. - * + * * @param sess * The session state. * @param inputs * The set of input tables and partitions. * @param outputs * The set of output tables, partitions, local and hdfs directories. + * @param lInfo + * The column level lineage information. * @param ugi * The user group security information.
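For readers implementing against the widened PostExecute interface in this patch, a hypothetical hook that consumes the new LineageInfo argument might look like the following; it is not part of the patch, and the iteration pattern simply mirrors PostExecutePrinter:

    package org.apache.hadoop.hive.ql.hooks;

    import java.util.Iterator;
    import java.util.Map;
    import java.util.Set;

    import org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency;
    import org.apache.hadoop.hive.ql.hooks.LineageInfo.DependencyKey;
    import org.apache.hadoop.hive.ql.session.SessionState;
    import org.apache.hadoop.security.UserGroupInformation;

    public class LineageLoggingHook implements PostExecute {
      @Override
      public void run(SessionState sess, Set<ReadEntity> inputs,
          Set<WriteEntity> outputs, LineageInfo linfo,
          UserGroupInformation ugi) throws Exception {
        if (linfo == null) {
          return; // no lineage was computed for this query
        }
        Iterator<Map.Entry<DependencyKey, Dependency>> it = linfo.iterator();
        while (it.hasNext()) {
          Map.Entry<DependencyKey, Dependency> e = it.next();
          // Print "destination column <- dependency type" for each entry.
          System.out.println(e.getKey().getFieldSchema().getName()
              + " <- " + e.getValue().getType());
        }
      }
    }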
*/ void run(SessionState sess, Set inputs, - Set outputs, UserGroupInformation ugi) throws Exception; + Set outputs, LineageInfo lInfo, + UserGroupInformation ugi) throws Exception; } Index: ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java (revision 927279) +++ ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java (working copy) @@ -31,7 +31,7 @@ /** * The run command that is called just before the execution of the query. - * + * * @param sess * The session state. * @param inputs @@ -41,7 +41,8 @@ * @param ugi * The user group security information. */ - void run(SessionState sess, Set inputs, - Set outputs, UserGroupInformation ugi) throws Exception; + public void run(SessionState sess, Set inputs, + Set outputs, UserGroupInformation ugi) + throws Exception; } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java (revision 927279) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java (working copy) @@ -121,4 +121,9 @@ } + @Override + public String getName() { + return "LVJ"; + } + } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (revision 927279) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (working copy) @@ -31,6 +31,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; @@ -39,6 +40,7 @@ import org.apache.hadoop.hive.ql.plan.LoadTableDesc; import org.apache.hadoop.hive.ql.plan.MoveWork; import org.apache.hadoop.hive.ql.plan.api.StageType; +import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.util.StringUtils; /** @@ -151,7 +153,10 @@ } } + // Create a data container + DataContainer dc = null; if (tbd.getPartitionSpec().size() == 0) { + dc = new DataContainer(table.getTTable()); db.loadTable(new Path(tbd.getSourceDir()), tbd.getTable() .getTableName(), tbd.getReplace(), new Path(tbd.getTmpDir())); if (work.getOutputs() != null) { @@ -164,10 +169,16 @@ new Path(tbd.getTmpDir())); Partition partn = db.getPartition(table, tbd.getPartitionSpec(), false); + dc = new DataContainer(table.getTTable(), partn.getTPartition()); if (work.getOutputs() != null) { work.getOutputs().add(new WriteEntity(partn)); } } + + if (SessionState.get() != null) { + SessionState.get().getLineageState() + .setLineage(tbd.getSourceDir(), dc, table.getCols()); + } } return 0; Index: ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java (revision 927279) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java (working copy) @@ -20,8 +20,9 @@ import java.io.Serializable; import java.util.HashSet; -import java.util.Set; +import java.util.List; +import org.apache.hadoop.hive.metastore.api.Partition; import 
org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; @@ -46,6 +47,11 @@ */ protected HashSet outputs; + /** + * List of inserted partitions + */ + protected List movedParts; + public MoveWork() { } Index: ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java (revision 927279) +++ ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java (working copy) @@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.hooks.LineageInfo; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; @@ -73,6 +74,10 @@ * to the outputs here. */ private HashSet outputs; + /** + * Lineage information for the query. + */ + protected LineageInfo linfo; private HashMap idToTableNameMap; @@ -94,6 +99,7 @@ // Note that inputs and outputs can be changed when the query gets executed inputs = sem.getInputs(); outputs = sem.getOutputs(); + linfo = sem.getLineageInfo(); idToTableNameMap = new HashMap(sem.getIdToTableNameMap()); queryId = makeQueryId(); @@ -711,4 +717,21 @@ this.started = started; } + /** + * Gets the lineage information. + * + * @return LineageInfo associated with the query. + */ + public LineageInfo getLineageInfo() { + return linfo; + } + + /** + * Sets the lineage information. + * + * @param linfo The LineageInfo structure that is set in the optimization phase. + */ + public void setLineageInfo(LineageInfo linfo) { + this.linfo = linfo; + } } Index: ql/src/java/org/apache/hadoop/hive/ql/lib/Utils.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/lib/Utils.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/lib/Utils.java (revision 0) @@ -0,0 +1,53 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.lib; + +import java.util.Stack; + +/** + * Contains common utility functions to manipulate nodes, walkers etc. + */ +public class Utils { + + /** + * Gets the nth ancestor (the parent being the 1st ancestor) in the traversal + * path. n=0 returns the currently visited node. + * + * @param st The stack that encodes the traversal path. + * @param n The value of n (n=0 is the currently visited node). + * + * @return Node The Nth ancestor in the path with respect to the current node. 
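As a quick illustration of the contract documented above (and of how OpProcFactory's getParent uses it with n = 1): the traversal stack holds the root-to-current path, so the parent sits one below the top. The operator names here are invented:

    import java.util.Stack;

    class AncestorDemo {
      public static void main(String[] args) {
        Stack<String> path = new Stack<String>();
        path.push("TS_0");  // table scan (root)
        path.push("SEL_1"); // select
        path.push("FS_2");  // file sink (currently visited node)
        // Same answer getNthAncestor(path, 1) computes, without mutating the stack.
        String parent = path.get(path.size() - 2);
        System.out.println(parent); // SEL_1
      }
    }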
+ */ + public static Node getNthAncestor(Stack st, int n) { + assert(st.size() - 1 >= n); + + Stack tmpStack = new Stack(); + for(int i=0; i<=n; i++) + tmpStack.push(st.pop()); + + Node ret_nd = tmpStack.peek(); + + for(int i=0; i<=n; i++) + st.push(tmpStack.pop()); + + assert(tmpStack.isEmpty()); + + return ret_nd; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java (revision 927279) +++ ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java (working copy) @@ -38,7 +38,6 @@ protected Stack opStack; private final List toWalk = new ArrayList(); - private final Set seenList = new HashSet(); private final HashMap retMap = new HashMap(); private final Dispatcher dispatcher; Index: ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (revision 927279) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (working copy) @@ -37,6 +37,7 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.hooks.LineageInfo; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; @@ -58,14 +59,6 @@ * BaseSemanticAnalyzer. * */ -/** - * BaseSemanticAnalyzer. - * - */ -/** - * BaseSemanticAnalyzer. - * - */ public abstract class BaseSemanticAnalyzer { protected final Hive db; protected final HiveConf conf; @@ -88,6 +81,10 @@ * List of WriteEntities that are passed to the hooks. */ protected HashSet outputs; + /** + * Lineage information for the query. + */ + protected LineageInfo linfo; protected static final String TEXTFILE_INPUT = TextInputFormat.class .getName(); @@ -494,4 +491,22 @@ } } } + + /** + * Gets the lineage information. + * + * @return LineageInfo associated with the query. + */ + public LineageInfo getLineageInfo() { + return linfo; + } + + /** + * Sets the lineage information. + * + * @param linfo The LineageInfo structure that is set in the optimization phase. 
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java	(revision 927279)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java	(working copy)
@@ -38,7 +38,6 @@
 
   protected Stack<Node> opStack;
   private final List<Node> toWalk = new ArrayList<Node>();
-  private final Set<Node> seenList = new HashSet<Node>();
   private final HashMap<Node, Object> retMap = new HashMap<Node, Object>();
 
   private final Dispatcher dispatcher;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java	(revision 927279)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java	(working copy)
@@ -37,6 +37,7 @@
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.hooks.LineageInfo;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
@@ -58,14 +59,6 @@
  * BaseSemanticAnalyzer.
  *
  */
-/**
- * BaseSemanticAnalyzer.
- *
- */
-/**
- * BaseSemanticAnalyzer.
- *
- */
 public abstract class BaseSemanticAnalyzer {
   protected final Hive db;
   protected final HiveConf conf;
@@ -88,6 +81,10 @@
    * List of WriteEntities that are passed to the hooks.
    */
   protected HashSet<WriteEntity> outputs;
+  /**
+   * Lineage information for the query.
+   */
+  protected LineageInfo linfo;
 
   protected static final String TEXTFILE_INPUT = TextInputFormat.class
       .getName();
@@ -494,4 +491,22 @@
       }
     }
   }
+
+  /**
+   * Gets the lineage information.
+   *
+   * @return LineageInfo associated with the query.
+   */
+  public LineageInfo getLineageInfo() {
+    return linfo;
+  }
+
+  /**
+   * Sets the lineage information.
+   *
+   * @param linfo The LineageInfo structure that is set in the optimization phase.
+   */
+  public void setLineageInfo(LineageInfo linfo) {
+    this.linfo = linfo;
+  }
 }
clause: " + dest + "dest_path: " + dest_path + " row schema: " + inputRR.toString()); @@ -5277,7 +5288,8 @@ colsEqual, alias, rwsch, qb.getMetaData(), null); tableOp = OperatorFactory.getAndMakeChild(new FilterDesc( samplePredicate, true, new sampleDesc(ts.getNumerator(), ts - .getDenominator(), tabBucketCols, true)), top); + .getDenominator(), tabBucketCols, true)), + new RowSchema(rwsch.getColumnInfos()), top); } else { // need to add filter // create tableOp to be filterDesc and set as child to 'top' @@ -5285,7 +5297,8 @@ ExprNodeDesc samplePredicate = genSamplePredicate(ts, tabBucketCols, colsEqual, alias, rwsch, qb.getMetaData(), null); tableOp = OperatorFactory.getAndMakeChild(new FilterDesc( - samplePredicate, true), top); + samplePredicate, true), + new RowSchema(rwsch.getColumnInfos()), top); } } else { boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE); @@ -5316,7 +5329,8 @@ tableOp = OperatorFactory .getAndMakeChild(new FilterDesc(samplePred, true, new sampleDesc(tsSample.getNumerator(), tsSample - .getDenominator(), tab.getBucketCols(), true)), top); + .getDenominator(), tab.getBucketCols(), true)), + new RowSchema(rwsch.getColumnInfos()), top); LOG.info("No need for sample filter"); } else { // The table is not bucketed, add a dummy filter :: rand() @@ -5331,7 +5345,8 @@ ExprNodeDesc samplePred = genSamplePredicate(tsSample, null, false, alias, rwsch, qb.getMetaData(), randFunc); tableOp = OperatorFactory.getAndMakeChild(new FilterDesc( - samplePred, true), top); + samplePred, true), + new RowSchema(rwsch.getColumnInfos()), top); } } } @@ -5616,8 +5631,9 @@ } else { new ArrayList(); for (LoadTableDesc ltd : loadTableWork) { - mvTask.add(TaskFactory.get(new MoveWork(null, null, ltd, null, false), - conf)); + Task tsk = TaskFactory.get(new MoveWork(null, null, ltd, null, false), + conf); + mvTask.add(tsk); } boolean oneLoadFile = true; @@ -5950,8 +5966,8 @@ } ParseContext pCtx = new ParseContext(conf, qb, child, opToPartPruner, - topOps, topSelOps, opParseCtx, joinContext, topToTable, loadTableWork, - loadFileWork, ctx, idToTableNameMap, destTableId, uCtx, + topOps, topSelOps, opParseCtx, joinContext, topToTable, + loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx, listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions, opToSamplePruner); Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (revision 927279) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (working copy) @@ -33,6 +33,7 @@ import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; +import org.apache.hadoop.hive.ql.hooks.LineageInfo; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; @@ -48,7 +49,7 @@ * populated. Note that since the parse context contains the operator tree, it * can be easily retrieved by the next optimization step or finally for task * generation after the plan has been completely optimized. - * + * **/ public class ParseContext { @@ -75,6 +76,11 @@ private Map> groupOpToInputTables; private Map prunedPartitions; + /** + * The lineage information. 
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java	(revision 927279)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java	(working copy)
@@ -33,6 +33,7 @@
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.hooks.LineageInfo;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -48,7 +49,7 @@
  * populated. Note that since the parse context contains the operator tree, it
  * can be easily retrieved by the next optimization step or finally for task
  * generation after the plan has been completely optimized.
- * 
+ *
  **/
 
 public class ParseContext {
@@ -75,6 +76,11 @@
   private Map<GroupByOperator, Set<String>> groupOpToInputTables;
   private Map<String, PrunedPartitionList> prunedPartitions;
 
+  /**
+   * The lineage information.
+   */
+  private LineageInfo lInfo;
+
   // is set to true if the expression only contains partitioning columns and not
   // any other column reference.
   // This is used to optimize select * from table where ... scenario, when the
@@ -105,6 +111,7 @@
    *          context needed join processing (map join specifically)
    * @param topToTable
    *          the top tables being processed
+   * @param fopToTable the table schemas that are being inserted into
    * @param loadTableWork
    *          list of destination tables being loaded
    * @param loadFileWork
@@ -383,7 +390,7 @@
 
   /**
    * Sets the hasNonPartCols flag.
-   * 
+   *
    * @param val
    */
   public void setHasNonPartCols(boolean val) {
@@ -443,6 +450,24 @@
     this.prunedPartitions = prunedPartitions;
   }
 
+  /**
+   * Sets the lineage information.
+   *
+   * @param lInfo The lineage information.
+   */
+  public void setLineageInfo(LineageInfo lInfo) {
+    this.lInfo = lInfo;
+  }
+
+  /**
+   * Gets the associated lineage information.
+   *
+   * @return LineageInfo
+   */
+  public LineageInfo getLineageInfo() {
+    return lInfo;
+  }
+
   public Map<MapJoinOperator, QBJoinTree> getMapJoinContext() {
     return mapJoinContext;
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java	(revision 927279)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java	(working copy)
@@ -162,7 +162,7 @@
       List<FieldSchema> lst = sem.getResultSchema();
       schema = new Schema(lst, null);
     } else if (sem.getFetchTask() != null) {
-      FetchTask ft = (FetchTask) sem.getFetchTask();
+      FetchTask ft = sem.getFetchTask();
       TableDesc td = ft.getTblDesc();
       // partitioned tables don't have tableDesc set on the FetchTask. Instead
       // they have a list of PartitionDesc objects, each with a table desc.
@@ -294,7 +294,7 @@
 
     try {
       ctx = new Context(conf);
-      
+
       ParseDriver pd = new ParseDriver();
       ASTNode tree = pd.parse(command, ctx);
       tree = ParseUtils.findRootNonNullToken(tree);
@@ -317,25 +317,25 @@
       schema = getSchema(sem, conf);
 
       // Serialize the query plan
-      // get temp file name and remove file: 
+      // get temp file name and remove file:
       String queryPlanFileName = ctx.getLocalScratchDir() + Path.SEPARATOR_CHAR
           + "queryplan.xml";
       LOG.info("query plan = " + queryPlanFileName);
       queryPlanFileName = new Path(queryPlanFileName).toUri().getPath();
-      
-      // serialize the queryPlan 
+
+      // serialize the queryPlan
       FileOutputStream fos = new FileOutputStream(queryPlanFileName);
       Utilities.serializeQueryPlan(plan, fos);
       fos.close();
-      
-      // deserialize the queryPlan 
+
+      // deserialize the queryPlan
       FileInputStream fis = new FileInputStream(queryPlanFileName);
       QueryPlan newPlan = Utilities.deserializeQueryPlan(fis, conf);
       fis.close();
-      
+
       // Use the deserialized plan
       plan = newPlan;
-      
+
       // initialize FetchTask right here
       if (plan.getFetchTask() != null) {
         plan.getFetchTask().initialize(conf, plan, null);
@@ -540,6 +540,7 @@
       // Get all the post execution hooks and execute them.
       for (PostExecute peh : getPostExecHooks()) {
         peh.run(SessionState.get(), plan.getInputs(), plan.getOutputs(),
+            (SessionState.get() != null ? SessionState.get().getLineageState().getLineageInfo() : null),
             UnixUserGroupInformation.readFromConf(conf,
                 UnixUserGroupInformation.UGI_PROPERTY_NAME));
       }
@@ -676,7 +677,7 @@
 
   public boolean getResults(ArrayList<String> res) throws IOException {
     if (plan != null && plan.getFetchTask() != null) {
-      FetchTask ft = (FetchTask) plan.getFetchTask();
+      FetchTask ft = plan.getFetchTask();
       ft.setMaxRows(maxRows);
       return ft.fetch(res);
     }
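
For illustration, a post-execution hook that consumes the LineageInfo now
passed by the Driver. A minimal sketch, assuming the PostExecute interface
(updated elsewhere in this patch) declares the new argument in the position
shown in the peh.run() call above; the class name is hypothetical:

    public class PrintLineageHook implements PostExecute {
      public void run(SessionState sess, Set<ReadEntity> inputs,
          Set<WriteEntity> outputs, LineageInfo linfo,
          UserGroupInformation ugi) throws Exception {
        // The lineage may be null when nothing was recorded for the query.
        if (linfo != null) {
          System.out.println("Lineage: " + linfo);
        }
      }
    }
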