Index: hbase-handler/src/test/results/hbase_queries.q.out =================================================================== --- hbase-handler/src/test/results/hbase_queries.q.out (revision 990244) +++ hbase-handler/src/test/results/hbase_queries.q.out (working copy) @@ -20,15 +20,15 @@ key int from deserializer value string from deserializer -Detailed Table Information Table(tableName:hbase_table_1, dbName:default, owner:jsichi, createTime:1282173927, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:pfile:/data/users/jsichi/open/hive-trunk/build/hbase-handler/test/data/warehouse/hbase_table_1, inputFormat:org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat, outputFormat:org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.hbase.HBaseSerDe, parameters:{serialization.format=1, hbase.columns.mapping=cf:string}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{hbase.table.name=hbase_table_0, transient_lastDdlTime=1282173927, storage_handler=org.apache.hadoop.hive.hbase.HBaseStorageHandler}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +Detailed Table Information Table(tableName:hbase_table_1, dbName:default, owner:jsichi, createTime:1282870784, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:pfile:/data/users/jsichi/open/hive-trunk/build/hbase-handler/test/data/warehouse/hbase_table_1, inputFormat:org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat, outputFormat:org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.hbase.HBaseSerDe, parameters:{serialization.format=1, hbase.columns.mapping=cf:string}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{hbase.table.name=hbase_table_0, transient_lastDdlTime=1282870784, storage_handler=org.apache.hadoop.hive.hbase.HBaseStorageHandler}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) PREHOOK: query: select * from hbase_table_1 PREHOOK: type: QUERY PREHOOK: Input: default@hbase_table_1 -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-25-27_525_1549744759045649155/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-44_959_664484390867638581/-mr-10000 POSTHOOK: query: select * from hbase_table_1 POSTHOOK: type: QUERY POSTHOOK: Input: default@hbase_table_1 -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-25-27_525_1549744759045649155/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-44_959_664484390867638581/-mr-10000 PREHOOK: query: EXPLAIN FROM src INSERT OVERWRITE TABLE hbase_table_1 SELECT * WHERE (key%2)=0 PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN FROM src INSERT OVERWRITE TABLE hbase_table_1 SELECT * WHERE (key%2)=0 @@ -198,7 +198,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/jsichi/hive_2010-08-18_16-25-31_035_4609595925459515104/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_17-59-48_518_6224953898347253125/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -237,7 +237,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@hbase_table_1 PREHOOK: Input: default@src -PREHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-18_16-25-31_205_3758488585778891870/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-48_687_7267893285176645949/-mr-10000 POSTHOOK: query: SELECT Y.* FROM (SELECT hbase_table_1.* FROM hbase_table_1) x @@ -248,7 +248,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@hbase_table_1 POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-25-31_205_3758488585778891870/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-48_687_7267893285176645949/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -382,7 +382,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/jsichi/hive_2010-08-18_16-25-39_199_9206397980280656193/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_17-59-56_537_4371415923720310528/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -418,9 +418,9 @@ ON (x.key = Y.key) ORDER BY key,value PREHOOK: type: QUERY +PREHOOK: Input: default@hbase_table_1 PREHOOK: Input: default@hbase_table_2 -PREHOOK: Input: default@hbase_table_1 -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-25-39_390_6285199586703567793/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-56_729_1646077641371437252/-mr-10000 POSTHOOK: query: SELECT Y.* FROM (SELECT hbase_table_1.* FROM hbase_table_1 WHERE hbase_table_1.key > 100) x @@ -429,9 +429,9 @@ ON (x.key = Y.key) ORDER BY key,value POSTHOOK: type: QUERY +POSTHOOK: Input: default@hbase_table_1 POSTHOOK: Input: default@hbase_table_2 -POSTHOOK: Input: default@hbase_table_1 -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-25-39_390_6285199586703567793/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-56_729_1646077641371437252/-mr-10000 104 val_104 114 val_114 116 val_116 @@ -462,48 +462,48 @@ PREHOOK: type: QUERY PREHOOK: Input: default@empty_hbase_table PREHOOK: Input: default@empty_normal_table -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-25-50_518_3658678318133495310/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-07_075_2779968526096451886/-mr-10000 POSTHOOK: query: select * from (select count(1) as c from empty_normal_table union all select count(1) as c from empty_hbase_table) x order by c POSTHOOK: type: QUERY POSTHOOK: Input: default@empty_hbase_table POSTHOOK: Input: default@empty_normal_table -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-25-50_518_3658678318133495310/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-07_075_2779968526096451886/-mr-10000 0 0 PREHOOK: query: select * from (select count(1) c from empty_normal_table union all select count(1) as c from hbase_table_1) x order by c PREHOOK: type: QUERY PREHOOK: Input: default@empty_normal_table PREHOOK: Input: default@hbase_table_1 -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-26-17_084_9167375564007367329/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-16_419_1002434022290170784/-mr-10000 POSTHOOK: query: select * from (select count(1) c from empty_normal_table union all select count(1) as c from hbase_table_1) x order by c POSTHOOK: type: QUERY POSTHOOK: Input: default@empty_normal_table POSTHOOK: Input: default@hbase_table_1 -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-26-17_084_9167375564007367329/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-16_419_1002434022290170784/-mr-10000 0 155 PREHOOK: query: select * from (select count(1) c from src union all select count(1) as c from empty_hbase_table) x order by c PREHOOK: type: QUERY 
PREHOOK: Input: default@empty_hbase_table PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-26-26_377_966123060596040797/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-25_859_2570695575641705647/-mr-10000 POSTHOOK: query: select * from (select count(1) c from src union all select count(1) as c from empty_hbase_table) x order by c POSTHOOK: type: QUERY POSTHOOK: Input: default@empty_hbase_table POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-26-26_377_966123060596040797/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-25_859_2570695575641705647/-mr-10000 0 500 PREHOOK: query: select * from (select count(1) c from src union all select count(1) as c from hbase_table_1) x order by c PREHOOK: type: QUERY +PREHOOK: Input: default@hbase_table_1 PREHOOK: Input: default@src -PREHOOK: Input: default@hbase_table_1 -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-26-35_671_2780181685990528166/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-35_201_7212376066806846352/-mr-10000 POSTHOOK: query: select * from (select count(1) c from src union all select count(1) as c from hbase_table_1) x order by c POSTHOOK: type: QUERY +POSTHOOK: Input: default@hbase_table_1 POSTHOOK: Input: default@src -POSTHOOK: Input: default@hbase_table_1 -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-26-35_671_2780181685990528166/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-35_201_7212376066806846352/-mr-10000 155 500 PREHOOK: query: CREATE TABLE hbase_table_3(key int, value string, count int) @@ -703,20 +703,20 @@ PREHOOK: query: select count(1) from hbase_table_3 PREHOOK: type: QUERY PREHOOK: Input: default@hbase_table_3 -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-26-55_167_90595836337066043/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-55_042_4227305362165776181/-mr-10000 POSTHOOK: query: select count(1) from hbase_table_3 POSTHOOK: type: QUERY POSTHOOK: Input: default@hbase_table_3 -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-26-55_167_90595836337066043/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-55_042_4227305362165776181/-mr-10000 155 PREHOOK: query: select * from hbase_table_3 order by key, value limit 5 PREHOOK: type: QUERY PREHOOK: Input: default@hbase_table_3 -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-26-58_706_203500985498924230/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-58_660_6615166785753648742/-mr-10000 POSTHOOK: query: select * from hbase_table_3 order by key, value limit 5 POSTHOOK: type: QUERY POSTHOOK: Input: default@hbase_table_3 -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-26-58_706_203500985498924230/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-58_660_6615166785753648742/-mr-10000 0 val_0 3 2 val_2 1 4 val_4 1 @@ -725,11 +725,11 @@ PREHOOK: query: select key, count from hbase_table_3 order by key, count desc limit 5 PREHOOK: type: QUERY PREHOOK: Input: default@hbase_table_3 -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-27-02_230_8846971062758664211/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-02_229_7032023633895650392/-mr-10000 POSTHOOK: query: select key, count from hbase_table_3 order by key, count desc limit 5 POSTHOOK: type: QUERY POSTHOOK: Input: default@hbase_table_3 -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-27-02_230_8846971062758664211/-mr-10000 +POSTHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-26_18-01-02_229_7032023633895650392/-mr-10000 0 3 2 1 4 1 @@ -765,11 +765,11 @@ PREHOOK: query: SELECT * FROM hbase_table_4 ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@hbase_table_4 -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-27-10_988_7755611253183514631/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-11_244_2234692314536262670/-mr-10000 POSTHOOK: query: SELECT * FROM hbase_table_4 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@hbase_table_4 -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-27-10_988_7755611253183514631/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-11_244_2234692314536262670/-mr-10000 98 val_98 99 100 100 val_100 101 102 PREHOOK: query: DROP TABLE hbase_table_5 @@ -790,11 +790,11 @@ PREHOOK: query: SELECT * FROM hbase_table_5 ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@hbase_table_5 -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-27-14_698_7762836144438891670/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-15_009_1646727916221485033/-mr-10000 POSTHOOK: query: SELECT * FROM hbase_table_5 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@hbase_table_5 -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-27-14_698_7762836144438891670/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-15_009_1646727916221485033/-mr-10000 98 {"b":"val_98","c":"99"} 100 {"b":"val_100","c":"101"} PREHOOK: query: DROP TABLE hbase_table_6 @@ -827,11 +827,11 @@ PREHOOK: query: SELECT * FROM hbase_table_6 ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@hbase_table_6 -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-27-23_588_6722865455199798152/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-23_977_5712476856221236690/-mr-10000 POSTHOOK: query: SELECT * FROM hbase_table_6 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@hbase_table_6 -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-27-23_588_6722865455199798152/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-23_977_5712476856221236690/-mr-10000 98 {"val_98":"98"} 100 {"val_100":"100"} PREHOOK: query: DROP TABLE hbase_table_7 @@ -866,11 +866,11 @@ PREHOOK: query: SELECT * FROM hbase_table_7 ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@hbase_table_7 -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-27-32_378_331084097550140091/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-32_885_9063305271253836242/-mr-10000 POSTHOOK: query: SELECT * FROM hbase_table_7 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@hbase_table_7 -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-27-32_378_331084097550140091/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-32_885_9063305271253836242/-mr-10000 {"VAL_98":"99.0","val_98":"98"} 98 {"VAL_100":"101.0","val_100":"100"} 100 PREHOOK: query: DROP TABLE hbase_table_8 @@ -903,11 +903,11 @@ PREHOOK: query: SELECT * FROM hbase_table_8 ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@hbase_table_8 -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-27-41_278_4692141557051029281/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-41_786_3914765665154306140/-mr-10000 POSTHOOK: query: SELECT * FROM hbase_table_8 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@hbase_table_8 -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-27-41_278_4692141557051029281/-mr-10000 +POSTHOOK: 
Output: file:/tmp/jsichi/hive_2010-08-26_18-01-41_786_3914765665154306140/-mr-10000 98 val_98 99 100 100 val_100 101 102 PREHOOK: query: DROP TABLE hbase_table_1 Index: hbase-handler/src/test/results/hbase_joins.q.out =================================================================== --- hbase-handler/src/test/results/hbase_joins.q.out (revision 990244) +++ hbase-handler/src/test/results/hbase_joins.q.out (working copy) @@ -86,104 +86,104 @@ PREHOOK: query: SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c ON (u.country = c.key) PREHOOK: type: QUERY +PREHOOK: Input: default@countries PREHOOK: Input: default@users -PREHOOK: Input: default@countries -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-24-35_518_6582730260204141679/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-58-44_025_3464030805185795112/-mr-10000 POSTHOOK: query: SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c ON (u.country = c.key) POSTHOOK: type: QUERY +POSTHOOK: Input: default@countries POSTHOOK: Input: default@users -POSTHOOK: Input: default@countries -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-24-35_518_6582730260204141679/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-58-44_025_3464030805185795112/-mr-10000 user1 USA United States USA PREHOOK: query: SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c ON (u.country = c.country) PREHOOK: type: QUERY +PREHOOK: Input: default@countries PREHOOK: Input: default@users -PREHOOK: Input: default@countries -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-24-39_508_6031616226903764593/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-58-49_449_2533239955498825412/-mr-10000 POSTHOOK: query: SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c ON (u.country = c.country) POSTHOOK: type: QUERY +POSTHOOK: Input: default@countries POSTHOOK: Input: default@users -POSTHOOK: Input: default@countries -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-24-39_508_6031616226903764593/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-58-49_449_2533239955498825412/-mr-10000 user1 USA United States USA PREHOOK: query: SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c ON (u.country_id = c.country_id) PREHOOK: type: QUERY +PREHOOK: Input: default@countries PREHOOK: Input: default@users -PREHOOK: Input: default@countries -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-24-43_394_7124445524751329949/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-58-54_460_9134325599532847572/-mr-10000 POSTHOOK: query: SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c ON (u.country_id = c.country_id) POSTHOOK: type: QUERY +POSTHOOK: Input: default@countries POSTHOOK: Input: default@users -POSTHOOK: Input: default@countries -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-24-43_394_7124445524751329949/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-58-54_460_9134325599532847572/-mr-10000 PREHOOK: query: SELECT u.key, u.state, s.name FROM users u JOIN states s ON (u.state = s.key) PREHOOK: type: QUERY +PREHOOK: Input: default@states PREHOOK: Input: default@users -PREHOOK: Input: default@states -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-24-47_258_669944869681282290/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-58-59_427_3646437485215925564/-mr-10000 POSTHOOK: query: SELECT u.key, u.state, s.name FROM users u JOIN states s ON (u.state = s.key) POSTHOOK: 
type: QUERY +POSTHOOK: Input: default@states POSTHOOK: Input: default@users -POSTHOOK: Input: default@states -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-24-47_258_669944869681282290/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-58-59_427_3646437485215925564/-mr-10000 user1 IA Iowa PREHOOK: query: SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c ON (u.country = c.key) PREHOOK: type: QUERY +PREHOOK: Input: default@countries PREHOOK: Input: default@users -PREHOOK: Input: default@countries -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-24-51_124_1283622353907376927/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-03_357_736778343063311968/-mr-10000 POSTHOOK: query: SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c ON (u.country = c.key) POSTHOOK: type: QUERY +POSTHOOK: Input: default@countries POSTHOOK: Input: default@users -POSTHOOK: Input: default@countries -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-24-51_124_1283622353907376927/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-03_357_736778343063311968/-mr-10000 user1 USA United States USA PREHOOK: query: SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c ON (u.country = c.country) PREHOOK: type: QUERY +PREHOOK: Input: default@countries PREHOOK: Input: default@users -PREHOOK: Input: default@countries -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-24-55_006_4907179709145458200/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-08_313_7684989920596569472/-mr-10000 POSTHOOK: query: SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c ON (u.country = c.country) POSTHOOK: type: QUERY +POSTHOOK: Input: default@countries POSTHOOK: Input: default@users -POSTHOOK: Input: default@countries -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-24-55_006_4907179709145458200/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-08_313_7684989920596569472/-mr-10000 user1 USA United States USA PREHOOK: query: SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c ON (u.country_id = c.country_id) PREHOOK: type: QUERY +PREHOOK: Input: default@countries PREHOOK: Input: default@users -PREHOOK: Input: default@countries -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-24-59_926_3403340219286631512/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-13_256_4291980393265625395/-mr-10000 POSTHOOK: query: SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c ON (u.country_id = c.country_id) POSTHOOK: type: QUERY +POSTHOOK: Input: default@countries POSTHOOK: Input: default@users -POSTHOOK: Input: default@countries -POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-24-59_926_3403340219286631512/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-13_256_4291980393265625395/-mr-10000 PREHOOK: query: SELECT u.key, u.state, s.name FROM users u JOIN states s ON (u.state = s.key) PREHOOK: type: QUERY +PREHOOK: Input: default@states PREHOOK: Input: default@users -PREHOOK: Input: default@states -PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-18_16-25-03_662_70234471203920991/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-18_206_1231084557369200625/-mr-10000 POSTHOOK: query: SELECT u.key, u.state, s.name FROM users u JOIN states s ON (u.state = s.key) POSTHOOK: type: QUERY +POSTHOOK: Input: default@states POSTHOOK: Input: default@users -POSTHOOK: Input: default@states -POSTHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-18_16-25-03_662_70234471203920991/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-18_206_1231084557369200625/-mr-10000 user1 IA Iowa PREHOOK: query: DROP TABLE users PREHOOK: type: DROPTABLE Index: ql/src/test/results/clientpositive/bucketmapjoin2.q.out_0.17 =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin2.q.out_0.17 (revision 990244) +++ ql/src/test/results/clientpositive/bucketmapjoin2.q.out_0.17 (working copy) @@ -130,7 +130,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-50-34_268_9178399648757251837/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-54_261_3709163670151055576/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -141,12 +141,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351834 + transient_lastDdlTime 1282940394 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -204,7 +204,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-50-34_268_9178399648757251837/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-54_261_3709163670151055576/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -215,12 +215,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351834 + transient_lastDdlTime 1282940394 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -229,15 +229,15 @@ Alias Bucket Base File Name Mapping: b {srcbucket20.txt=[srcbucket22.txt], srcbucket21.txt=[srcbucket23.txt]} Alias Bucket File Name Mapping: - b {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]} + b {pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin [a] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [a] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -249,12 +249,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351829 + transient_lastDdlTime 1282940389 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -266,12 +266,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351829 + transient_lastDdlTime 1282940389 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -283,14 +283,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-50-34_268_9178399648757251837/-ext-10002 - destination: 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-50-34_268_9178399648757251837/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-54_261_3709163670151055576/-ext-10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-54_261_3709163670151055576/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-50-34_268_9178399648757251837/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-54_261_3709163670151055576/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -300,20 +300,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351834 + transient_lastDdlTime 1282940394 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-50-34_268_9178399648757251837/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-54_261_3709163670151055576/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-50-34_268_9178399648757251837/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-54_261_3709163670151055576/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -329,9 +329,9 @@ type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-50-34_268_9178399648757251837/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-50-34_268_9178399648757251837/-ext-10002] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-54_261_3709163670151055576/-ext-10002 [pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-54_261_3709163670151055576/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-50-34_268_9178399648757251837/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-54_261_3709163670151055576/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -342,12 +342,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351834 + transient_lastDdlTime 1282940394 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -358,12 +358,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351834 + transient_lastDdlTime 1282940394 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -372,7 +372,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-50-34_268_9178399648757251837/-ext-10000 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-54_261_3709163670151055576/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -383,12 +383,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351834 + transient_lastDdlTime 1282940394 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -400,16 +400,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(b)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, 
type:string, comment:null), ] @@ -417,11 +417,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-50-44_506_2681512743853298333/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-20-03_719_6665293727490387231/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-50-44_506_2681512743853298333/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-20-03_719_6665293727490387231/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part_2)b.FieldSchema(name:value, type:string, comment:null), ] @@ -447,16 +447,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(b)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -470,11 +470,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-51-03_448_5828100812224598669/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-20-18_384_3865621339601729806/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-51-03_448_5828100812224598669/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-20-18_384_3865621339601729806/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -511,16 +511,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: 
default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-51-11_012_1132409568718382985/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-20-24_983_6691574317220546085/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-51-11_012_1132409568718382985/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-20-24_983_6691574317220546085/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -618,7 +618,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-51-16_201_2116965400320957487/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-20-28_849_1096973413691523770/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -629,12 +629,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351863 + transient_lastDdlTime 1282940418 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -682,7 +682,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-51-16_201_2116965400320957487/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-20-28_849_1096973413691523770/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -693,12 +693,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { 
string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351863 + transient_lastDdlTime 1282940418 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -707,15 +707,15 @@ Alias Bucket Base File Name Mapping: a {srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -729,13 +729,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351832 + transient_lastDdlTime 1282940392 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -747,13 +747,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351832 + transient_lastDdlTime 1282940392 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part_2 name: srcbucket_mapjoin_part_2 @@ -765,14 +765,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-51-16_201_2116965400320957487/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-51-16_201_2116965400320957487/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-20-28_849_1096973413691523770/-ext-10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-20-28_849_1096973413691523770/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-51-16_201_2116965400320957487/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-20-28_849_1096973413691523770/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -782,20 +782,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351863 + transient_lastDdlTime 1282940418 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-51-16_201_2116965400320957487/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-20-28_849_1096973413691523770/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-51-16_201_2116965400320957487/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-20-28_849_1096973413691523770/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -811,9 +811,9 @@ type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-51-16_201_2116965400320957487/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-51-16_201_2116965400320957487/-ext-10002] + 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-20-28_849_1096973413691523770/-ext-10002 [pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-20-28_849_1096973413691523770/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-51-16_201_2116965400320957487/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-20-28_849_1096973413691523770/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -824,12 +824,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351863 + transient_lastDdlTime 1282940418 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -840,12 +840,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351863 + transient_lastDdlTime 1282940418 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -854,7 +854,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-51-16_201_2116965400320957487/-ext-10000 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-20-28_849_1096973413691523770/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -865,12 +865,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351863 + transient_lastDdlTime 1282940418 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -882,16 +882,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b 
on a.key=b.key and b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -911,11 +911,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-51-25_949_2229692064745753127/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-20-37_225_250073906219450831/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-51-25_949_2229692064745753127/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-20-37_225_250073906219450831/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -965,16 +965,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] @@ -1000,11 +1000,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-51-43_409_5319562416833462836/-mr-10000 
+PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-20-51_635_932709049820822575/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-51-43_409_5319562416833462836/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-20-51_635_932709049820822575/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -1065,16 +1065,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-51-50_791_5994428915085779264/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-20-58_123_3810133318564098124/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-51-50_791_5994428915085779264/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-20-58_123_3810133318564098124/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/union14.q.out =================================================================== --- ql/src/test/results/clientpositive/union14.q.out (revision 990244) +++ ql/src/test/results/clientpositive/union14.q.out (working copy) @@ -47,7 +47,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1033881589/10002 + file:/tmp/jsichi/hive_2010-08-26_17-07-20_757_8607585855873415399/-mr-10002 Union Select Operator expressions: @@ -75,7 +75,7 @@ value expressions: expr: _col1 type: bigint - file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/1033881589/10003 + file:/tmp/jsichi/hive_2010-08-26_17-07-20_757_8607585855873415399/-mr-10003 Union Select Operator expressions: @@ -177,17 +177,17 @@ select 'tst1' as key, cast(count(1) as string) as value from src s1) unionsrc group by unionsrc.key PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@src1 -PREHOOK: Input: default@src -PREHOOK: Output: 
file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/156140549/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-07-20_875_6103598893495788063/-mr-10000 POSTHOOK: query: select unionsrc.key, count(1) FROM (select s2.key as key, s2.value as value from src1 s2 UNION ALL select 'tst1' as key, cast(count(1) as string) as value from src s1) unionsrc group by unionsrc.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 -POSTHOOK: Input: default@src -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/156140549/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-07-20_875_6103598893495788063/-mr-10000 10 128 1 146 1 Index: ql/src/test/results/clientpositive/join32.q.out_0.17 =================================================================== --- ql/src/test/results/clientpositive/join32.q.out_0.17 (revision 990244) +++ ql/src/test/results/clientpositive/join32.q.out_0.17 (working copy) @@ -48,7 +48,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/nzhang/hive_2010-08-18_12-00-40_705_5960721321942434914/-mr-10003 + directory: file:/tmp/jsichi/hive_2010-08-27_13-52-11_902_9082359192091349153/-mr-10003 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -84,7 +84,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/nzhang/hive_2010-08-18_12-00-40_705_5960721321942434914/-mr-10003 + directory: file:/tmp/jsichi/hive_2010-08-27_13-52-11_902_9082359192091349153/-mr-10003 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -97,9 +97,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/src [y] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/src [y] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/src + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -110,12 +110,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157853 + transient_lastDdlTime 1282940176 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -126,12 +126,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157853 + transient_lastDdlTime 1282940176 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src @@ 
-139,7 +139,7 @@ Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: - file:/tmp/nzhang/hive_2010-08-18_12-00-40_705_5960721321942434914/-mr-10003 + file:/tmp/jsichi/hive_2010-08-27_13-52-11_902_9082359192091349153/-mr-10003 Select Operator expressions: expr: _col0 @@ -182,7 +182,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-40_705_5960721321942434914/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-52-11_902_9082359192091349153/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -193,12 +193,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158040 + transient_lastDdlTime 1282942331 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -261,7 +261,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-40_705_5960721321942434914/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-52-11_902_9082359192091349153/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -272,21 +272,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158040 + transient_lastDdlTime 1282942331 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - file:/tmp/nzhang/hive_2010-08-18_12-00-40_705_5960721321942434914/-mr-10003 [file:/tmp/nzhang/hive_2010-08-18_12-00-40_705_5960721321942434914/-mr-10003] + file:/tmp/jsichi/hive_2010-08-27_13-52-11_902_9082359192091349153/-mr-10003 [file:/tmp/jsichi/hive_2010-08-27_13-52-11_902_9082359192091349153/-mr-10003] Path -> Partition: - file:/tmp/nzhang/hive_2010-08-18_12-00-40_705_5960721321942434914/-mr-10003 + file:/tmp/jsichi/hive_2010-08-27_13-52-11_902_9082359192091349153/-mr-10003 Partition base file name: -mr-10003 input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -310,14 +310,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-40_705_5960721321942434914/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-40_705_5960721321942434914/-ext-10000 + 
source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-52-11_902_9082359192091349153/-ext-10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-52-11_902_9082359192091349153/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-40_705_5960721321942434914/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-52-11_902_9082359192091349153/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -327,20 +327,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158040 + transient_lastDdlTime 1282942331 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-40_705_5960721321942434914/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-52-11_902_9082359192091349153/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-40_705_5960721321942434914/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-52-11_902_9082359192091349153/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -356,9 +356,9 @@ type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-40_705_5960721321942434914/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-40_705_5960721321942434914/-ext-10002] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-52-11_902_9082359192091349153/-ext-10002 [pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-52-11_902_9082359192091349153/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-40_705_5960721321942434914/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-52-11_902_9082359192091349153/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -369,12 +369,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158040 + transient_lastDdlTime 1282942331 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -385,12 +385,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158040 + transient_lastDdlTime 1282942331 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -399,7 +399,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-40_705_5960721321942434914/-ext-10000 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-52-11_902_9082359192091349153/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -410,12 +410,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158040 + transient_lastDdlTime 1282942331 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -427,18 +427,18 @@ FROM src1 x JOIN src y ON (x.key = y.key) JOIN srcpart z ON (x.value = z.value and z.ds='2008-04-08' and z.hr=11) PREHOOK: type: QUERY -PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@src PREHOOK: Input: default@src1 +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Output: default@dest_j1 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(x,z) */ x.key, z.value, y.value FROM src1 x JOIN src y ON (x.key = y.key) JOIN srcpart z ON (x.value = z.value and z.ds='2008-04-08' and z.hr=11) POSTHOOK: type: QUERY -POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Output: default@dest_j1 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 EXPRESSION [(src)y.FieldSchema(name:value, type:string, comment:default), ] @@ -446,11 +446,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_12-00-49_444_5311228476742662204/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-52-18_581_2705301025151045736/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: 
file:/tmp/nzhang/hive_2010-08-18_12-00-49_444_5311228476742662204/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-52-18_581_2705301025151045736/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 EXPRESSION [(src)y.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join38.q.out =================================================================== --- ql/src/test/results/clientpositive/join38.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join38.q.out (working copy) @@ -26,11 +26,11 @@ PREHOOK: query: select * from tmp PREHOOK: type: QUERY PREHOOK: Input: default@tmp -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_12-59-37_683_5319482143791387584/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-00-50_066_9191331330135572186/-mr-10000 POSTHOOK: query: select * from tmp POSTHOOK: type: QUERY POSTHOOK: Input: default@tmp -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_12-59-37_683_5319482143791387584/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-00-50_066_9191331330135572186/-mr-10000 POSTHOOK: Lineage: tmp.col0 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tmp.col1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tmp.col10 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] @@ -138,7 +138,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_12-59-37_907_1369838299280728635/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-00-50_333_7198447322152213728/-mr-10002 Select Operator expressions: expr: _col1 @@ -224,17 +224,17 @@ where b.col11 = 111 group by a.value, b.col5 PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@tmp -PREHOOK: Input: default@src -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_12-59-38_019_4750710911180862739/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-00-50_441_1604865558910370709/-mr-10000 POSTHOOK: query: FROM src a JOIN tmp b ON (a.key = b.col11) SELECT /*+ MAPJOIN(a) */ a.value, b.col5, count(1) as count where b.col11 = 111 group by a.value, b.col5 POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@tmp -POSTHOOK: Input: default@src -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_12-59-38_019_4750710911180862739/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-00-50_441_1604865558910370709/-mr-10000 POSTHOOK: Lineage: tmp.col0 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tmp.col1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tmp.col10 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join_nulls.q.out =================================================================== --- ql/src/test/results/clientpositive/join_nulls.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join_nulls.q.out (working 
copy) @@ -23,11 +23,11 @@ PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-17_243_2644277693400732372/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-02-48_124_7015383824487148614/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-17_243_2644277693400732372/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-02-48_124_7015383824487148614/-mr-10000 NULL 35 NULL 35 NULL 35 48 NULL NULL 35 100 100 @@ -40,11 +40,11 @@ PREHOOK: query: SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-19_784_5319048142201987096/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-02-51_137_5392106147060915386/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-19_784_5319048142201987096/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-02-51_137_5392106147060915386/-mr-10000 NULL 35 NULL 35 NULL 35 48 NULL NULL 35 100 100 @@ -57,11 +57,11 @@ PREHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-22_077_1127144305944050770/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-02-54_114_1931885903904719968/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-22_077_1127144305944050770/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-02-54_114_1931885903904719968/-mr-10000 NULL 35 NULL 35 NULL 35 48 NULL NULL 35 100 100 @@ -74,106 +74,106 @@ PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-24_424_3529043878133127087/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-02-57_092_6147186374641397090/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.key = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-24_424_3529043878133127087/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-02-57_092_6147186374641397090/-mr-10000 100 100 100 100 PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-26_755_1023724496806721215/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-00_166_3374165725343302038/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-26_755_1023724496806721215/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-00_166_3374165725343302038/-mr-10000 48 NULL 48 NULL 100 100 100 100 PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.value = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 
-PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-29_041_5563395763782073560/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-03_313_6675493236971514079/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.value = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-29_041_5563395763782073560/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-03_313_6675493236971514079/-mr-10000 NULL 35 NULL 35 100 100 100 100 PREHOOK: query: SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-31_315_7618449693784281411/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-06_385_8124137130909821070/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-31_315_7618449693784281411/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-06_385_8124137130909821070/-mr-10000 NULL 35 NULL NULL 48 NULL NULL NULL 100 100 100 100 PREHOOK: query: SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.value = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-33_682_9012862787524525556/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-09_467_5872921360456775671/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.value = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-33_682_9012862787524525556/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-09_467_5872921360456775671/-mr-10000 48 NULL NULL NULL NULL 35 NULL 35 100 100 100 100 PREHOOK: query: SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-35_913_1368787467713976651/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-12_616_374281490034878641/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-35_913_1368787467713976651/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-12_616_374281490034878641/-mr-10000 NULL 35 NULL NULL 48 NULL 48 NULL 100 100 100 100 PREHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-38_139_507910550329126296/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-15_826_7832828975838305207/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-38_139_507910550329126296/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-15_826_7832828975838305207/-mr-10000 NULL NULL 48 NULL NULL NULL NULL 35 100 100 100 100 PREHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 
-PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-40_380_5752206579111501535/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-18_990_932824536088686525/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-40_380_5752206579111501535/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-18_990_932824536088686525/-mr-10000 NULL NULL NULL 35 48 NULL 48 NULL 100 100 100 100 PREHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.value = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-42_711_7203915333997653053/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-22_127_7819363622946760351/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.value = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-42_711_7203915333997653053/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-22_127_7819363622946760351/-mr-10000 NULL NULL 48 NULL NULL 35 NULL 35 100 100 100 100 PREHOOK: query: SELECT * FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-45_005_2097190139910462757/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-25_224_8317788697567724783/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.key = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-45_005_2097190139910462757/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-25_224_8317788697567724783/-mr-10000 NULL 35 NULL NULL NULL NULL 48 NULL NULL NULL NULL 35 @@ -182,11 +182,11 @@ PREHOOK: query: SELECT * FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-47_257_7323115616296556466/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-28_320_6340316978542429365/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-47_257_7323115616296556466/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-28_320_6340316978542429365/-mr-10000 NULL 35 NULL NULL NULL NULL NULL 35 48 NULL 48 NULL @@ -194,11 +194,11 @@ PREHOOK: query: SELECT * FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.value = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-49_511_2835828582770697326/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-31_394_8318002319095939201/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.value = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-49_511_2835828582770697326/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-31_394_8318002319095939201/-mr-10000 48 NULL NULL NULL NULL NULL 48 NULL NULL 35 NULL 35 @@ -206,44 +206,44 @@ PREHOOK: query: SELECT * from myinput1 a 
LEFT OUTER JOIN myinput1 b ON (a.value=b.value) RIGHT OUTER JOIN myinput1 c ON (b.value=c.value) PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-51_722_5486518368735387317/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-34_512_1005205187544184747/-mr-10000 POSTHOOK: query: SELECT * from myinput1 a LEFT OUTER JOIN myinput1 b ON (a.value=b.value) RIGHT OUTER JOIN myinput1 c ON (b.value=c.value) POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-51_722_5486518368735387317/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-34_512_1005205187544184747/-mr-10000 NULL NULL NULL NULL 48 NULL NULL 35 NULL 35 NULL 35 100 100 100 100 100 100 PREHOOK: query: SELECT * from myinput1 a RIGHT OUTER JOIN myinput1 b ON (a.value=b.value) LEFT OUTER JOIN myinput1 c ON (b.value=c.value) PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-54_007_6526115910610177518/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-37_711_6340949506772786686/-mr-10000 POSTHOOK: query: SELECT * from myinput1 a RIGHT OUTER JOIN myinput1 b ON (a.value=b.value) LEFT OUTER JOIN myinput1 c ON (b.value=c.value) POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-54_007_6526115910610177518/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-37_711_6340949506772786686/-mr-10000 NULL NULL 48 NULL NULL NULL NULL 35 NULL 35 NULL 35 100 100 100 100 100 100 PREHOOK: query: SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b RIGHT OUTER JOIN myinput1 c ON a.value = b.value and b.value = c.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-56_301_1180499696568055707/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-40_948_2794890176126244018/-mr-10000 POSTHOOK: query: SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b RIGHT OUTER JOIN myinput1 c ON a.value = b.value and b.value = c.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-02-56_301_1180499696568055707/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-40_948_2794890176126244018/-mr-10000 NULL NULL NULL NULL 48 NULL NULL 35 NULL 35 NULL 35 100 100 100 100 100 100 PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM myinput1 a JOIN myinput1 b PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-00_784_8177108597645959542/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-46_973_5529249160800115819/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM myinput1 a JOIN myinput1 b POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-00_784_8177108597645959542/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-46_973_5529249160800115819/-mr-10000 NULL 35 NULL 35 48 NULL NULL 35 100 100 NULL 35 @@ -256,124 +256,124 @@ PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM myinput1 a JOIN myinput1 b ON a.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-02_995_5954257082995484423/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-49_963_1000428859740736851/-mr-10000 POSTHOOK: 
query: SELECT /*+ MAPJOIN(a) */ * FROM myinput1 a JOIN myinput1 b ON a.key = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-02_995_5954257082995484423/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-49_963_1000428859740736851/-mr-10000 100 100 100 100 PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM myinput1 a JOIN myinput1 b ON a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-05_179_893551064505374226/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-52_943_6089957004261811143/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM myinput1 a JOIN myinput1 b ON a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-05_179_893551064505374226/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-52_943_6089957004261811143/-mr-10000 48 NULL 48 NULL 100 100 100 100 PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM myinput1 a JOIN myinput1 b ON a.value = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-07_429_2356564524660274058/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-55_914_2591780807247412588/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM myinput1 a JOIN myinput1 b ON a.value = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-07_429_2356564524660274058/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-55_914_2591780807247412588/-mr-10000 NULL 35 NULL 35 100 100 100 100 PREHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM myinput1 a JOIN myinput1 b ON a.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-09_626_7027313822877080506/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-58_880_4987797610511570601/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM myinput1 a JOIN myinput1 b ON a.key = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-09_626_7027313822877080506/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-03-58_880_4987797610511570601/-mr-10000 100 100 100 100 PREHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM myinput1 a JOIN myinput1 b ON a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-11_809_1536686550822126228/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-01_842_9018417136841199963/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM myinput1 a JOIN myinput1 b ON a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-11_809_1536686550822126228/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-01_842_9018417136841199963/-mr-10000 48 NULL 48 NULL 100 100 100 100 PREHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM myinput1 a JOIN myinput1 b ON a.value = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-14_070_2576009303176359392/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-04_825_5836817454953993059/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(b) */ * 
FROM myinput1 a JOIN myinput1 b ON a.value = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-14_070_2576009303176359392/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-04_825_5836817454953993059/-mr-10000 NULL 35 NULL 35 100 100 100 100 PREHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-16_258_3601408426081449535/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-07_804_5966018968689338722/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-16_258_3601408426081449535/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-07_804_5966018968689338722/-mr-10000 NULL 35 NULL NULL 48 NULL NULL NULL 100 100 100 100 PREHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-18_463_6253242788418262785/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-10_784_2095633421816713692/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-18_463_6253242788418262785/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-10_784_2095633421816713692/-mr-10000 NULL 35 NULL NULL 48 NULL 48 NULL 100 100 100 100 PREHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.value = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-20_654_6218349195280704470/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-13_756_806948691912832873/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.value = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-20_654_6218349195280704470/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-13_756_806948691912832873/-mr-10000 NULL 35 NULL 35 48 NULL NULL NULL 100 100 100 100 PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-22_924_6570219390510773741/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-16_720_3478071990295891514/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-22_924_6570219390510773741/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-16_720_3478071990295891514/-mr-10000 NULL NULL NULL 35 NULL NULL 48 NULL 100 100 100 100 PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: 
file:/tmp/amarsri/hive_2010-08-23_23-03-25_161_1733690523870724803/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-19_692_9213491013813899575/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-25_161_1733690523870724803/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-19_692_9213491013813899575/-mr-10000 NULL NULL NULL 35 48 NULL 48 NULL 100 100 100 100 PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.value = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-27_372_7774596545760682849/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-22_674_3153813260067204961/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.value = b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-27_372_7774596545760682849/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-22_674_3153813260067204961/-mr-10000 NULL 35 NULL 35 NULL NULL 48 NULL 100 100 100 100 @@ -410,11 +410,11 @@ PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input1 a JOIN smb_input1 b ON a.key = b.key ORDER BY a.key PREHOOK: type: QUERY PREHOOK: Input: default@smb_input1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-30_243_4546985922226107164/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-28_378_3637991343439427870/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input1 a JOIN smb_input1 b ON a.key = b.key ORDER BY a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_input1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-30_243_4546985922226107164/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-28_378_3637991343439427870/-mr-10000 48 NULL 48 NULL 100 100 100 100 148 NULL 148 NULL @@ -422,11 +422,11 @@ PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input1 a RIGHT OUTER JOIN smb_input1 b ON a.key = b.key ORDER BY b.key, b.value PREHOOK: type: QUERY PREHOOK: Input: default@smb_input1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-34_575_8944309786058146271/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-34_285_4680092571717814403/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input1 a RIGHT OUTER JOIN smb_input1 b ON a.key = b.key ORDER BY b.key, b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_input1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-34_575_8944309786058146271/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-34_285_4680092571717814403/-mr-10000 NULL NULL NULL 35 NULL NULL NULL 135 48 NULL 48 NULL @@ -436,11 +436,11 @@ PREHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM smb_input1 a JOIN smb_input1 b ON a.key = b.key ORDER BY a.key PREHOOK: type: QUERY PREHOOK: Input: default@smb_input1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-38_873_5820673626777323488/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-40_182_8403671338047239743/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM smb_input1 a JOIN smb_input1 b ON a.key = b.key ORDER BY a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_input1 -POSTHOOK: Output: 
file:/tmp/amarsri/hive_2010-08-23_23-03-38_873_5820673626777323488/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-40_182_8403671338047239743/-mr-10000 48 NULL 48 NULL 100 100 100 100 148 NULL 148 NULL @@ -448,11 +448,11 @@ PREHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM smb_input1 a LEFT OUTER JOIN smb_input1 b ON a.key = b.key ORDER BY a.key, a.value PREHOOK: type: QUERY PREHOOK: Input: default@smb_input1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-43_139_2148561557873278612/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-46_000_1719051160596831618/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM smb_input1 a LEFT OUTER JOIN smb_input1 b ON a.key = b.key ORDER BY a.key, a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_input1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-43_139_2148561557873278612/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-46_000_1719051160596831618/-mr-10000 NULL 35 NULL NULL NULL 135 NULL NULL 48 NULL 48 NULL @@ -461,38 +461,38 @@ 200 200 200 200 PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input1 a JOIN smb_input2 b ON a.key = b.value ORDER BY a.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_input1 PREHOOK: Input: default@smb_input2 -PREHOOK: Input: default@smb_input1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-47_455_7725011320735873205/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-51_827_8844085800766914508/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input1 a JOIN smb_input2 b ON a.key = b.value ORDER BY a.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_input1 POSTHOOK: Input: default@smb_input2 -POSTHOOK: Input: default@smb_input1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-47_455_7725011320735873205/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-51_827_8844085800766914508/-mr-10000 100 100 100 100 200 200 200 200 PREHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM smb_input1 a JOIN smb_input2 b ON a.key = b.value ORDER BY a.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_input1 PREHOOK: Input: default@smb_input2 -PREHOOK: Input: default@smb_input1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-51_797_1660829853063165698/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-57_811_3661181644538537663/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM smb_input1 a JOIN smb_input2 b ON a.key = b.value ORDER BY a.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_input1 POSTHOOK: Input: default@smb_input2 -POSTHOOK: Input: default@smb_input1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-51_797_1660829853063165698/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-04-57_811_3661181644538537663/-mr-10000 100 100 100 100 200 200 200 200 PREHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM smb_input1 a LEFT OUTER JOIN smb_input2 b ON a.key = b.value ORDER BY a.key, a.value PREHOOK: type: QUERY +PREHOOK: Input: default@smb_input1 PREHOOK: Input: default@smb_input2 -PREHOOK: Input: default@smb_input1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-56_206_1650617190450676746/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-04_807_559648882758842098/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM smb_input1 a LEFT OUTER JOIN smb_input2 b ON a.key = b.value ORDER BY a.key, a.value POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_input1 POSTHOOK: Input: 
default@smb_input2 -POSTHOOK: Input: default@smb_input1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-03-56_206_1650617190450676746/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-04_807_559648882758842098/-mr-10000 NULL 35 NULL NULL NULL 135 NULL NULL 48 NULL NULL NULL @@ -501,14 +501,14 @@ 200 200 200 200 PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input1 a RIGHT OUTER JOIN smb_input2 b ON a.key = b.value ORDER BY b.key, b.value PREHOOK: type: QUERY +PREHOOK: Input: default@smb_input1 PREHOOK: Input: default@smb_input2 -PREHOOK: Input: default@smb_input1 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-04-00_563_760559040118626904/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-10_696_6790164286167579949/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input1 a RIGHT OUTER JOIN smb_input2 b ON a.key = b.value ORDER BY b.key, b.value POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_input1 POSTHOOK: Input: default@smb_input2 -POSTHOOK: Input: default@smb_input1 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-04-00_563_760559040118626904/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-10_696_6790164286167579949/-mr-10000 NULL NULL NULL 35 NULL NULL NULL 135 NULL NULL 48 NULL @@ -518,11 +518,11 @@ PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input2 a JOIN smb_input2 b ON a.value = b.value ORDER BY a.value PREHOOK: type: QUERY PREHOOK: Input: default@smb_input2 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-04-04_862_4725133242101166020/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-17_104_7087439589570631019/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input2 a JOIN smb_input2 b ON a.value = b.value ORDER BY a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_input2 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-04-04_862_4725133242101166020/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-17_104_7087439589570631019/-mr-10000 NULL 35 NULL 35 100 100 100 100 NULL 135 NULL 135 @@ -530,11 +530,11 @@ PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input2 a RIGHT OUTER JOIN smb_input2 b ON a.value = b.value ORDER BY b.key, b.value PREHOOK: type: QUERY PREHOOK: Input: default@smb_input2 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-04-09_224_8780642004670528896/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-23_036_5666167509100823043/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input2 a RIGHT OUTER JOIN smb_input2 b ON a.value = b.value ORDER BY b.key, b.value POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_input2 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-04-09_224_8780642004670528896/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-23_036_5666167509100823043/-mr-10000 NULL 35 NULL 35 NULL 135 NULL 135 NULL NULL 48 NULL @@ -544,11 +544,11 @@ PREHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM smb_input2 a JOIN smb_input2 b ON a.value = b.value ORDER BY a.value PREHOOK: type: QUERY PREHOOK: Input: default@smb_input2 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-04-13_547_7917315032596933463/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-28_862_6813009729308463908/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM smb_input2 a JOIN smb_input2 b ON a.value = b.value ORDER BY a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_input2 -POSTHOOK: Output: 
file:/tmp/amarsri/hive_2010-08-23_23-04-13_547_7917315032596933463/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-28_862_6813009729308463908/-mr-10000 NULL 35 NULL 35 100 100 100 100 NULL 135 NULL 135 @@ -556,11 +556,11 @@ PREHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM smb_input2 a LEFT OUTER JOIN smb_input2 b ON a.value = b.value ORDER BY a.key, a.value PREHOOK: type: QUERY PREHOOK: Input: default@smb_input2 -PREHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-04-17_843_48387701327028385/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-34_699_7855850081093628587/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(b) */ * FROM smb_input2 a LEFT OUTER JOIN smb_input2 b ON a.value = b.value ORDER BY a.key, a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_input2 -POSTHOOK: Output: file:/tmp/amarsri/hive_2010-08-23_23-04-17_843_48387701327028385/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-34_699_7855850081093628587/-mr-10000 NULL 35 NULL 35 NULL 135 NULL 135 48 NULL NULL NULL Index: ql/src/test/results/clientpositive/join26.q.out_0.17 =================================================================== --- ql/src/test/results/clientpositive/join26.q.out_0.17 (revision 990244) +++ ql/src/test/results/clientpositive/join26.q.out_0.17 (working copy) @@ -83,7 +83,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-20_724_7183622453729573020/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-51-01_567_5222040103440600396/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -94,12 +94,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158020 + transient_lastDdlTime 1282942261 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -153,7 +153,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-20_724_7183622453729573020/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-51-01_567_5222040103440600396/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -164,12 +164,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158020 + transient_lastDdlTime 1282942261 serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -213,7 +213,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-20_724_7183622453729573020/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-51-01_567_5222040103440600396/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -224,21 +224,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158020 + transient_lastDdlTime 1282942261 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -252,13 +252,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157844 + transient_lastDdlTime 1282940169 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -269,13 +269,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157844 + transient_lastDdlTime 1282940169 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -287,14 +287,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-20_724_7183622453729573020/-ext-10002 - destination: 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-20_724_7183622453729573020/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-51-01_567_5222040103440600396/-ext-10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-51-01_567_5222040103440600396/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-20_724_7183622453729573020/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-51-01_567_5222040103440600396/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -304,20 +304,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158020 + transient_lastDdlTime 1282942261 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-20_724_7183622453729573020/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-51-01_567_5222040103440600396/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-20_724_7183622453729573020/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-51-01_567_5222040103440600396/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -333,9 +333,9 @@ type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-20_724_7183622453729573020/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-20_724_7183622453729573020/-ext-10002] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-51-01_567_5222040103440600396/-ext-10002 [pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-51-01_567_5222040103440600396/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-20_724_7183622453729573020/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-51-01_567_5222040103440600396/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -346,12 +346,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl 
struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158020 + transient_lastDdlTime 1282942261 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -362,12 +362,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158020 + transient_lastDdlTime 1282942261 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -376,7 +376,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-20_724_7183622453729573020/-ext-10000 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-51-01_567_5222040103440600396/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -387,12 +387,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158020 + transient_lastDdlTime 1282942261 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -404,18 +404,18 @@ FROM src1 x JOIN src y ON (x.key = y.key) JOIN srcpart z ON (x.key = z.key and z.ds='2008-04-08' and z.hr=11) PREHOOK: type: QUERY -PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@src PREHOOK: Input: default@src1 +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Output: default@dest_j1 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value FROM src1 x JOIN src y ON (x.key = y.key) JOIN srcpart z ON (x.key = z.key and z.ds='2008-04-08' and z.hr=11) POSTHOOK: type: QUERY -POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Output: default@dest_j1 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] @@ -423,11 +423,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_12-00-26_499_7478535881310159821/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-51-06_513_4212370621532341209/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key 
POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_12-00-26_499_7478535881310159821/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-51-06_513_4212370621532341209/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/bucketmapjoin5.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin5.q.out (revision 990244) +++ ql/src/test/results/clientpositive/bucketmapjoin5.q.out (working copy) @@ -160,7 +160,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-50-51_368_5350546396771086780/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-32-34_867_8897207649119511130/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -171,12 +171,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110651 + transient_lastDdlTime 1282861954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -224,7 +224,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-50-51_368_5350546396771086780/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-32-34_867_8897207649119511130/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -235,12 +235,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110651 + transient_lastDdlTime 1282861954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -249,22 +249,22 @@ Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt], srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt], 
ds=2008-04-09/srcbucket20.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket21.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket22.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket23.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], 
pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt 1 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt 2 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt 3 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt 1 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt 2 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt 3 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 [b] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -278,13 +278,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110642 + transient_lastDdlTime 1282861946 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -296,17 +296,17 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110642 + transient_lastDdlTime 1282861946 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part name: srcbucket_mapjoin_part - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 Partition base file name: ds=2008-04-09 input format: org.apache.hadoop.mapred.TextInputFormat @@ -320,13 +320,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110642 + transient_lastDdlTime 1282861946 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -338,13 +338,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110642 + transient_lastDdlTime 1282861946 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part name: srcbucket_mapjoin_part @@ -356,14 +356,14 @@ Move Operator files: hdfs directory: true - source: 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-50-51_368_5350546396771086780/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-50-51_368_5350546396771086780/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-32-34_867_8897207649119511130/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-32-34_867_8897207649119511130/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-50-51_368_5350546396771086780/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-32-34_867_8897207649119511130/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -373,24 +373,24 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110651 + transient_lastDdlTime 1282861954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-50-51_368_5350546396771086780/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-32-34_867_8897207649119511130/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-50-51_368_5350546396771086780/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-32-34_867_8897207649119511130/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-50-51_368_5350546396771086780/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-32-34_867_8897207649119511130/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -401,21 +401,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110651 + transient_lastDdlTime 1282861954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
bucketmapjoin_tmp_result TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-50-51_368_5350546396771086780/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-50-51_368_5350546396771086780/-ext-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-32-34_867_8897207649119511130/-ext-10002 [pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-32-34_867_8897207649119511130/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-50-51_368_5350546396771086780/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-32-34_867_8897207649119511130/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -426,12 +426,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110651 + transient_lastDdlTime 1282861954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -442,12 +442,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110651 + transient_lastDdlTime 1282861954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -458,18 +458,18 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-09 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-09 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] 
POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] @@ -477,11 +477,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-51-04_261_7426219828390637762/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-32-45_477_2732155281688232597/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-51-04_261_7426219828390637762/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-32-45_477_2732155281688232597/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ] @@ -507,18 +507,18 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-09 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-09 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -532,11 +532,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-51-24_413_3659729041059262431/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-33-02_299_7650145598922104246/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-51-24_413_3659729041059262431/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-33-02_299_7650145598922104246/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] 
@@ -573,16 +573,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-51-32_623_4157649257530913054/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-33-08_714_273114286204001219/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-51-32_623_4157649257530913054/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-33-08_714_273114286204001219/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -670,7 +670,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-51-37_533_2275473416802667388/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-33-12_002_3382885953507765100/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -681,12 +681,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110684 + transient_lastDdlTime 1282861982 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -734,7 +734,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-51-37_533_2275473416802667388/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-33-12_002_3382885953507765100/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -745,12 +745,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location 
pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110684 + transient_lastDdlTime 1282861982 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -759,18 +759,18 @@ Alias Bucket Base File Name Mapping: a {srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket22.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket23.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt 1 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 + 
pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt 0 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt 1 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b] - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 [b] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -784,13 +784,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110648 + transient_lastDdlTime 1282861951 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -802,17 +802,17 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110648 + transient_lastDdlTime 1282861951 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part_2 name: srcbucket_mapjoin_part_2 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 Partition base file name: ds=2008-04-09 input format: org.apache.hadoop.mapred.TextInputFormat @@ -826,13 +826,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string 
value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110648 + transient_lastDdlTime 1282861951 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -844,13 +844,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110648 + transient_lastDdlTime 1282861951 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part_2 name: srcbucket_mapjoin_part_2 @@ -862,14 +862,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-51-37_533_2275473416802667388/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-51-37_533_2275473416802667388/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-33-12_002_3382885953507765100/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-33-12_002_3382885953507765100/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-51-37_533_2275473416802667388/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-33-12_002_3382885953507765100/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -879,24 +879,24 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110684 + transient_lastDdlTime 1282861982 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-51-37_533_2275473416802667388/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-33-12_002_3382885953507765100/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-51-37_533_2275473416802667388/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-33-12_002_3382885953507765100/-ext-10002 File Output Operator compressed: 
false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-51-37_533_2275473416802667388/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-33-12_002_3382885953507765100/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -907,21 +907,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110684 + transient_lastDdlTime 1282861982 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-51-37_533_2275473416802667388/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-51-37_533_2275473416802667388/-ext-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-33-12_002_3382885953507765100/-ext-10002 [pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-33-12_002_3382885953507765100/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-51-37_533_2275473416802667388/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-33-12_002_3382885953507765100/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -932,12 +932,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110684 + transient_lastDdlTime 1282861982 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -948,12 +948,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
transient_lastDdlTime 1282110684 + transient_lastDdlTime 1282861982 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -964,18 +964,18 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-09 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-09 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -995,11 +995,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-51-47_997_323383406252969587/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-33-20_762_1076908082223859177/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-51-47_997_323383406252969587/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-33-20_762_1076908082223859177/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -1049,18 +1049,18 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-09 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-09 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, 
comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] @@ -1086,11 +1086,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-52-05_128_4901853024310983340/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-33-35_187_1026481808950583465/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-52-05_128_4901853024310983340/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-33-35_187_1026481808950583465/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -1151,16 +1151,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-52-12_642_3510993017661467408/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-33-41_619_7453774123022828869/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-52-12_642_3510993017661467408/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-33-41_619_7453774123022828869/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/join_map_ppr.q.out_0.17 =================================================================== --- ql/src/test/results/clientpositive/join_map_ppr.q.out_0.17 (revision 990244) +++ ql/src/test/results/clientpositive/join_map_ppr.q.out_0.17 (working copy) @@ -84,7 +84,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-53_198_7552513637926884087/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-54-53_592_3911100532884895712/-ext-10002 
NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -95,12 +95,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158053 + transient_lastDdlTime 1282942493 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -163,7 +163,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-53_198_7552513637926884087/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-54-53_592_3911100532884895712/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -174,12 +174,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158053 + transient_lastDdlTime 1282942493 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -232,7 +232,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-53_198_7552513637926884087/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-54-53_592_3911100532884895712/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -243,21 +243,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158053 + transient_lastDdlTime 1282942493 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -271,13 +271,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157844 + transient_lastDdlTime 1282940169 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -288,13 +288,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157844 + transient_lastDdlTime 1282940169 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -306,14 +306,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-53_198_7552513637926884087/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-53_198_7552513637926884087/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-54-53_592_3911100532884895712/-ext-10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-54-53_592_3911100532884895712/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-53_198_7552513637926884087/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-54-53_592_3911100532884895712/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -323,20 +323,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158053 + transient_lastDdlTime 1282942493 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-53_198_7552513637926884087/-ext-10001 
+ tmp directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-54-53_592_3911100532884895712/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-53_198_7552513637926884087/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-54-53_592_3911100532884895712/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -352,9 +352,9 @@ type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-53_198_7552513637926884087/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-53_198_7552513637926884087/-ext-10002] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-54-53_592_3911100532884895712/-ext-10002 [pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-54-53_592_3911100532884895712/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-53_198_7552513637926884087/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-54-53_592_3911100532884895712/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -365,12 +365,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158053 + transient_lastDdlTime 1282942493 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -381,12 +381,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158053 + transient_lastDdlTime 1282942493 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -395,7 +395,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-53_198_7552513637926884087/-ext-10000 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-54-53_592_3911100532884895712/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -406,12 +406,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158053 + transient_lastDdlTime 1282942493 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -423,9 +423,9 @@ JOIN srcpart z ON (x.key = z.key) WHERE z.ds='2008-04-08' and z.hr=11 PREHOOK: type: QUERY -PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@src PREHOOK: Input: default@src1 +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Output: default@dest_j1 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value @@ -433,9 +433,9 @@ JOIN srcpart z ON (x.key = z.key) WHERE z.ds='2008-04-08' and z.hr=11 POSTHOOK: type: QUERY -POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Output: default@dest_j1 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] @@ -443,11 +443,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_12-00-59_150_5946892064655861445/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-54-58_494_5115558967305748352/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_12-00-59_150_5946892064655861445/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-54-58_494_5115558967305748352/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ] @@ -690,7 +690,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-01-10_868_4138678058783602981/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-55-08_718_8637784932212434358/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -701,12 +701,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158059 + transient_lastDdlTime 1282942498 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -769,7 +769,7 @@ File Output 
Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-01-10_868_4138678058783602981/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-55-08_718_8637784932212434358/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -780,12 +780,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158059 + transient_lastDdlTime 1282942498 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -838,7 +838,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-01-10_868_4138678058783602981/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-55-08_718_8637784932212434358/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -849,21 +849,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158059 + transient_lastDdlTime 1282942498 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -877,13 +877,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157844 + transient_lastDdlTime 1282940169 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -894,13 +894,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157844 + transient_lastDdlTime 1282940169 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -912,14 +912,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-01-10_868_4138678058783602981/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-01-10_868_4138678058783602981/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-55-08_718_8637784932212434358/-ext-10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-55-08_718_8637784932212434358/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-01-10_868_4138678058783602981/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-55-08_718_8637784932212434358/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -929,20 +929,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158059 + transient_lastDdlTime 1282942498 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-01-10_868_4138678058783602981/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-55-08_718_8637784932212434358/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-01-10_868_4138678058783602981/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-55-08_718_8637784932212434358/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -958,9 +958,9 @@ type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-01-10_868_4138678058783602981/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-01-10_868_4138678058783602981/-ext-10002] + 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-55-08_718_8637784932212434358/-ext-10002 [pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-55-08_718_8637784932212434358/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-01-10_868_4138678058783602981/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-55-08_718_8637784932212434358/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -971,12 +971,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158059 + transient_lastDdlTime 1282942498 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -987,12 +987,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158059 + transient_lastDdlTime 1282942498 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -1001,7 +1001,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-01-10_868_4138678058783602981/-ext-10000 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-55-08_718_8637784932212434358/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1012,12 +1012,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282158059 + transient_lastDdlTime 1282942498 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -1029,9 +1029,9 @@ JOIN srcpart z ON (x.key = z.key) WHERE z.ds='2008-04-08' and z.hr=11 PREHOOK: type: QUERY +PREHOOK: Input: default@src1_copy +PREHOOK: Input: default@src_copy PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -PREHOOK: Input: default@src_copy -PREHOOK: Input: default@src1_copy PREHOOK: Output: default@dest_j1 
POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value @@ -1039,9 +1039,9 @@ JOIN srcpart z ON (x.key = z.key) WHERE z.ds='2008-04-08' and z.hr=11 POSTHOOK: type: QUERY +POSTHOOK: Input: default@src1_copy +POSTHOOK: Input: default@src_copy POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -POSTHOOK: Input: default@src_copy -POSTHOOK: Input: default@src1_copy POSTHOOK: Output: default@dest_j1 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1_copy)x.FieldSchema(name:key, type:string, comment:null), ] @@ -1056,11 +1056,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_12-01-17_264_1310208710139829323/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-55-13_614_5667132009357094218/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_12-01-17_264_1310208710139829323/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-55-13_614_5667132009357094218/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1_copy)x.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/bucketmapjoin5.q.out_0.17 =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin5.q.out_0.17 (revision 990244) +++ ql/src/test/results/clientpositive/bucketmapjoin5.q.out_0.17 (working copy) @@ -160,7 +160,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-57-44_723_5342924335492122335/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-44_048_3362814336824214960/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -171,12 +171,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157864 + transient_lastDdlTime 1282940624 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -224,7 +224,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-57-44_723_5342924335492122335/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-44_048_3362814336824214960/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ 
-235,12 +235,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157864 + transient_lastDdlTime 1282940624 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -249,22 +249,22 @@ Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt], srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket20.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket21.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket22.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket23.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt 1 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt 2 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt 3 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt 1 + 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt 2 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt 3 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 [b] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 [b] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -278,13 +278,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157856 + transient_lastDdlTime 1282940615 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -296,17 +296,17 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157856 + transient_lastDdlTime 1282940615 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part name: srcbucket_mapjoin_part - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 Partition base file name: ds=2008-04-09 input format: org.apache.hadoop.mapred.TextInputFormat @@ -320,13 +320,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157856 + transient_lastDdlTime 1282940615 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -338,13 +338,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157856 + transient_lastDdlTime 1282940615 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part name: srcbucket_mapjoin_part @@ -356,14 +356,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-57-44_723_5342924335492122335/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-57-44_723_5342924335492122335/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-44_048_3362814336824214960/-ext-10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-44_048_3362814336824214960/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-57-44_723_5342924335492122335/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-44_048_3362814336824214960/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -373,20 +373,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157864 + transient_lastDdlTime 1282940624 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-57-44_723_5342924335492122335/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-44_048_3362814336824214960/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-57-44_723_5342924335492122335/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-44_048_3362814336824214960/-ext-10002 Reduce Output Operator sort order: Map-reduce partition 
columns: @@ -402,9 +402,9 @@ type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-57-44_723_5342924335492122335/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-57-44_723_5342924335492122335/-ext-10002] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-44_048_3362814336824214960/-ext-10002 [pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-44_048_3362814336824214960/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-57-44_723_5342924335492122335/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-44_048_3362814336824214960/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -415,12 +415,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157864 + transient_lastDdlTime 1282940624 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -431,12 +431,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157864 + transient_lastDdlTime 1282940624 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -445,7 +445,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-57-44_723_5342924335492122335/-ext-10000 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-44_048_3362814336824214960/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -456,12 +456,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} 
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157864 + transient_lastDdlTime 1282940624 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -473,18 +473,18 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-09 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-09 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] @@ -492,11 +492,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_11-58-03_580_4533133116413665606/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-23-59_410_2642286914558110313/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_11-58-03_580_4533133116413665606/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-23-59_410_2642286914558110313/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ] @@ -522,18 +522,18 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-09 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-09 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION 
[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -547,11 +547,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_11-58-32_305_7290030936664523666/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-24-22_309_7849202608320032551/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_11-58-32_305_7290030936664523666/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-24-22_309_7849202608320032551/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -588,16 +588,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_11-58-39_780_2037790620779229993/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-24-28_909_4705323531161455782/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_11-58-39_780_2037790620779229993/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-24-28_909_4705323531161455782/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -685,7 +685,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-58-45_253_7166928829838644011/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-24-32_579_4665962759304448485/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -696,12 +696,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157912 + transient_lastDdlTime 1282940662 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -749,7 +749,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-58-45_253_7166928829838644011/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-24-32_579_4665962759304448485/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -760,12 +760,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157912 + transient_lastDdlTime 1282940662 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -774,18 +774,18 @@ Alias Bucket Base File Name Mapping: a {srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket22.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket23.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt 1 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt 0 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt 1 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b] - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 [b] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 [b] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -799,13 +799,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157862 + transient_lastDdlTime 1282940621 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -817,17 +817,17 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157862 + transient_lastDdlTime 1282940621 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part_2 name: srcbucket_mapjoin_part_2 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 Partition base file name: ds=2008-04-09 input format: org.apache.hadoop.mapred.TextInputFormat @@ -841,13 +841,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157862 + transient_lastDdlTime 1282940621 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -859,13 +859,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157862 + transient_lastDdlTime 1282940621 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part_2 name: srcbucket_mapjoin_part_2 @@ -877,14 +877,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-58-45_253_7166928829838644011/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-58-45_253_7166928829838644011/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-24-32_579_4665962759304448485/-ext-10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-24-32_579_4665962759304448485/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-58-45_253_7166928829838644011/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-24-32_579_4665962759304448485/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -894,20 +894,20 @@ columns.types 
string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157912 + transient_lastDdlTime 1282940662 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-58-45_253_7166928829838644011/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-24-32_579_4665962759304448485/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-58-45_253_7166928829838644011/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-24-32_579_4665962759304448485/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -923,9 +923,9 @@ type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-58-45_253_7166928829838644011/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-58-45_253_7166928829838644011/-ext-10002] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-24-32_579_4665962759304448485/-ext-10002 [pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-24-32_579_4665962759304448485/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-58-45_253_7166928829838644011/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-24-32_579_4665962759304448485/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -936,12 +936,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157912 + transient_lastDdlTime 1282940662 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -952,12 +952,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157912 + transient_lastDdlTime 1282940662 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -966,7 +966,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_11-58-45_253_7166928829838644011/-ext-10000 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-24-32_579_4665962759304448485/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -977,12 +977,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282157912 + transient_lastDdlTime 1282940662 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -994,18 +994,18 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-09 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-09 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -1025,11 +1025,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_11-58-59_325_4063144386961253784/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-24-42_813_2027159015867580639/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_11-58-59_325_4063144386961253784/-mr-10000 +POSTHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-27_13-24-42_813_2027159015867580639/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -1079,18 +1079,18 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-09 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-09 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] @@ -1116,11 +1116,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_11-59-19_620_5883251350449169318/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-25-00_186_4050824185643389015/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_11-59-19_620_5883251350449169318/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-25-00_186_4050824185643389015/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -1181,16 +1181,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_11-59-27_875_3255265979471623386/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-25-06_933_2760601759923284738/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer 
join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_11-59-27_875_3255265979471623386/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-25-06_933_2760601759923284738/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/join_reorder3.q.out =================================================================== --- ql/src/test/results/clientpositive/join_reorder3.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join_reorder3.q.out (working copy) @@ -178,21 +178,21 @@ JOIN T3 c ON b.key = c.key JOIN T4 d ON c.key = d.key PREHOOK: type: QUERY -PREHOOK: Input: default@t4 +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-04-05_136_4804665682309466152/-mr-10000 +PREHOOK: Input: default@t4 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-46_655_1849254422797768428/-mr-10000 POSTHOOK: query: SELECT /*+ STREAMTABLE(a,c) */ * FROM T1 a JOIN T2 b ON a.key = b.key JOIN T3 c ON b.key = c.key JOIN T4 d ON c.key = d.key POSTHOOK: type: QUERY -POSTHOOK: Input: default@t4 +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-04-05_136_4804665682309466152/-mr-10000 +POSTHOOK: Input: default@t4 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-46_655_1849254422797768428/-mr-10000 2 12 2 22 2 12 2 12 PREHOOK: query: EXPLAIN SELECT /*+ STREAMTABLE(a,c) */ * @@ -411,19 +411,19 @@ JOIN T3 c ON a.val = c.val JOIN T4 d ON a.key + 1 = d.key + 1 PREHOOK: type: QUERY -PREHOOK: Input: default@t4 +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-04-10_594_441778213826699504/-mr-10000 +PREHOOK: Input: default@t4 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-51_531_8070318925118385343/-mr-10000 POSTHOOK: query: SELECT /*+ STREAMTABLE(a,c) */ * FROM T1 a JOIN T2 b ON a.key = b.key JOIN T3 c ON a.val = c.val JOIN T4 d ON a.key + 1 = d.key + 1 POSTHOOK: type: QUERY -POSTHOOK: Input: default@t4 +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-04-10_594_441778213826699504/-mr-10000 +POSTHOOK: Input: default@t4 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-51_531_8070318925118385343/-mr-10000 2 22 2 12 2 12 2 12 Index: ql/src/test/results/clientpositive/smb_mapjoin_5.q.out 
=================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_5.q.out (revision 990244) +++ ql/src/test/results/clientpositive/smb_mapjoin_5.q.out (working copy) @@ -107,16 +107,16 @@ PREHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-36_385_4852147544870050421/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-59_506_7425705513724988701/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-36_385_4852147544870050421/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-59_506_7425705513724988701/-mr-10000 PREHOOK: query: explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY @@ -196,16 +196,16 @@ PREHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-41_764_6538872553607538338/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-03_196_5790048256402643689/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-41_764_6538872553607538338/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-03_196_5790048256402643689/-mr-10000 PREHOOK: query: explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY @@ -285,16 +285,16 @@ PREHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-47_525_7609537170706520957/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-06_769_3169291687857146687/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key left outer join 
smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-47_525_7609537170706520957/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-06_769_3169291687857146687/-mr-10000 1 val_1 NULL NULL NULL NULL 3 val_3 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL @@ -379,16 +379,16 @@ PREHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-53_208_8042596508650249993/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-10_347_1368732534465416726/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-53_208_8042596508650249993/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-10_347_1368732534465416726/-mr-10000 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL 10 val_10 NULL NULL NULL NULL 17 val_17 @@ -474,16 +474,16 @@ PREHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-59_432_1032700429402645344/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-13_914_4693922800230325669/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-59_432_1032700429402645344/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-13_914_4693922800230325669/-mr-10000 1 val_1 NULL NULL NULL NULL 3 val_3 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL @@ -574,16 +574,16 @@ PREHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-05_146_2709155653925072592/-mr-10000 +PREHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-26_16-43-17_489_2041919668806434357/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-05_146_2709155653925072592/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-17_489_2041919668806434357/-mr-10000 NULL NULL 20 val_20 20 val_20 NULL NULL 23 val_23 23 val_23 PREHOOK: query: explain @@ -665,16 +665,16 @@ PREHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-14_799_2798390848490699688/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-21_045_2702987110966950539/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-14_799_2798390848490699688/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-21_045_2702987110966950539/-mr-10000 NULL NULL 20 val_20 20 val_20 NULL NULL 23 val_23 23 val_23 NULL NULL 25 val_25 NULL NULL @@ -758,16 +758,16 @@ PREHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-19_911_7441488040869826959/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-24_617_7501871832149027380/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-19_911_7441488040869826959/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-24_617_7501871832149027380/-mr-10000 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL 10 val_10 NULL NULL NULL NULL 17 val_17 @@ -853,16 +853,16 @@ PREHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: 
Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-24_418_3887561716344993053/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-28_185_262923637560436278/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-24_418_3887561716344993053/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-28_185_262923637560436278/-mr-10000 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL 10 val_10 NULL NULL NULL NULL 17 val_17 @@ -950,16 +950,16 @@ PREHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-29_446_6462649411348235491/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-31_728_1272689177460006956/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-29_446_6462649411348235491/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-31_728_1272689177460006956/-mr-10000 NULL NULL 20 val_20 20 val_20 NULL NULL 23 val_23 23 val_23 PREHOOK: query: explain @@ -1041,16 +1041,16 @@ PREHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-35_324_8959322839997459232/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-35_288_314053154578595998/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-35_324_8959322839997459232/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-35_288_314053154578595998/-mr-10000 1 val_1 NULL NULL NULL NULL 3 val_3 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL @@ -1139,16 +1139,16 @@ PREHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key right outer join 
smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-41_147_7418291170107301127/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-38_854_954250834063103058/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-41_147_7418291170107301127/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-38_854_954250834063103058/-mr-10000 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL 10 val_10 NULL NULL NULL NULL 17 val_17 @@ -1234,16 +1234,16 @@ PREHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-46_184_791404943752804999/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-42_403_8300758914248889413/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-48-46_184_791404943752804999/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-42_403_8300758914248889413/-mr-10000 1 val_1 NULL NULL NULL NULL 3 val_3 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL Index: ql/src/test/results/clientpositive/join33.q.out =================================================================== --- ql/src/test/results/clientpositive/join33.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join33.q.out (working copy) @@ -45,7 +45,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/heyongqiang/hive_2010-08-10_14-32-48_226_5686802088939269333/-mr-10002 + directory: file:/tmp/jsichi/hive_2010-08-26_15-59-49_528_2718740905898829882/-mr-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -81,7 +81,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/heyongqiang/hive_2010-08-10_14-32-48_226_5686802088939269333/-mr-10002 + directory: file:/tmp/jsichi/hive_2010-08-26_15-59-49_528_2718740905898829882/-mr-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -94,9 +94,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/src [y] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src [y] Path 
-> Partition: - pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/src + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -107,12 +107,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1281474272 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -123,12 +123,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1281474272 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src @@ -136,7 +136,7 @@ Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-08-10_14-32-48_226_5686802088939269333/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_15-59-49_528_2718740905898829882/-mr-10002 Select Operator expressions: expr: _col0 @@ -192,10 +192,10 @@ type: string Needs Tagging: true Path -> Alias: - file:/tmp/heyongqiang/hive_2010-08-10_14-32-48_226_5686802088939269333/-mr-10002 [file:/tmp/heyongqiang/hive_2010-08-10_14-32-48_226_5686802088939269333/-mr-10002] - pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] + file:/tmp/jsichi/hive_2010-08-26_15-59-49_528_2718740905898829882/-mr-10002 [file:/tmp/jsichi/hive_2010-08-26_15-59-49_528_2718740905898829882/-mr-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] Path -> Partition: - file:/tmp/heyongqiang/hive_2010-08-10_14-32-48_226_5686802088939269333/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_15-59-49_528_2718740905898829882/-mr-10002 Partition base file name: -mr-10002 input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -211,7 +211,7 @@ columns _col0,_col1,_col5 columns.types string,string,string escape.delim \ - pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -225,13 +225,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name 
srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1281474268 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -242,13 +242,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1281474268 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -273,7 +273,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/scratchdir/hive_2010-08-10_14-32-48_226_5686802088939269333/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-59-49_528_2718740905898829882/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -284,12 +284,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1281475968 + transient_lastDdlTime 1282863589 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -299,7 +299,7 @@ Move Operator tables: replace: true - source: pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/scratchdir/hive_2010-08-10_14-32-48_226_5686802088939269333/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-59-49_528_2718740905898829882/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -309,15 +309,15 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1281475968 + transient_lastDdlTime 1282863589 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/scratchdir/hive_2010-08-10_14-32-48_226_5686802088939269333/-ext-10001 + tmp directory: 
pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-59-49_528_2718740905898829882/-ext-10001 PREHOOK: query: INSERT OVERWRITE TABLE dest_j1 @@ -325,18 +325,18 @@ FROM src1 x JOIN src y ON (x.key = y.key) JOIN srcpart z ON (x.value = z.value and z.ds='2008-04-08' and z.hr=11) PREHOOK: type: QUERY -PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@src PREHOOK: Input: default@src1 +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Output: default@dest_j1 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(x) */ x.key, z.value, y.value FROM src1 x JOIN src y ON (x.key = y.key) JOIN srcpart z ON (x.value = z.value and z.ds='2008-04-08' and z.hr=11) POSTHOOK: type: QUERY -POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Output: default@dest_j1 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 EXPRESSION [(src)y.FieldSchema(name:value, type:string, comment:default), ] @@ -344,11 +344,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-08-10_14-32-53_828_6774209024808329746/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-59-56_099_6500613120945442582/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-08-10_14-32-53_828_6774209024808329746/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-59-56_099_6500613120945442582/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 EXPRESSION [(src)y.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join_hive_626.q.out =================================================================== --- ql/src/test/results/clientpositive/join_hive_626.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join_hive_626.q.out (working copy) @@ -172,15 +172,15 @@ PREHOOK: query: select hive_foo.foo_name, hive_bar.bar_name, n from hive_foo join hive_bar on hive_foo.foo_id = hive_bar.foo_id join hive_count on hive_count.bar_id = hive_bar.bar_id PREHOOK: type: QUERY +PREHOOK: Input: default@hive_bar +PREHOOK: Input: default@hive_count PREHOOK: Input: default@hive_foo -PREHOOK: Input: default@hive_count -PREHOOK: Input: default@hive_bar -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-01-56_605_4228947637564753632/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-02-16_764_5764737371981446922/-mr-10000 POSTHOOK: query: select hive_foo.foo_name, hive_bar.bar_name, n from hive_foo join hive_bar on hive_foo.foo_id = hive_bar.foo_id join hive_count on hive_count.bar_id = hive_bar.bar_id POSTHOOK: type: QUERY +POSTHOOK: Input: default@hive_bar +POSTHOOK: Input: default@hive_count POSTHOOK: Input: default@hive_foo -POSTHOOK: Input: default@hive_count -POSTHOOK: Input: default@hive_bar -POSTHOOK: Output: 
file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-01-56_605_4228947637564753632/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-02-16_764_5764737371981446922/-mr-10000 foo1 bar10 2 Index: ql/src/test/results/clientpositive/router_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/router_join_ppr.q.out (revision 990244) +++ ql/src/test/results/clientpositive/router_join_ppr.q.out (working copy) @@ -73,11 +73,11 @@ type: string Needs Tagging: true Path -> Alias: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src [a] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b] Path -> Partition: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -88,12 +88,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -104,16 +104,16 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -127,13 +127,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr 
serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -144,17 +144,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -168,13 +168,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -185,13 +185,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -223,7 +223,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-28-14_626_7302074914677033648/-ext-10001 + directory: file:/tmp/jsichi/hive_2010-08-26_16-27-58_740_7654580536117070815/-ext-10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -248,10 +248,10 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Input: default@src -PREHOOK: Output: 
file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-28-14_780_4918036257771774218/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-27-58_900_249160347924848000/-mr-10000 POSTHOOK: query: FROM src a RIGHT OUTER JOIN @@ -260,10 +260,10 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Input: default@src -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-28-14_780_4918036257771774218/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-27-58_900_249160347924848000/-mr-10000 17 val_17 17 val_17 17 val_17 17 val_17 18 val_18 18 val_18 @@ -356,11 +356,11 @@ type: string Needs Tagging: true Path -> Alias: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src [b] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a] Path -> Partition: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -371,12 +371,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -387,16 +387,16 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -410,13 +410,13 @@ columns.types string:string 
file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -427,17 +427,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -451,13 +451,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -468,13 +468,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -506,7 +506,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-28-19_945_6699980362823333608/-ext-10001 + directory: file:/tmp/jsichi/hive_2010-08-26_16-28-03_530_5511408924303498423/-ext-10001 NumFilesPerFileSink: 1 table: input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -534,7 +534,7 @@ PREHOOK: Input: default@src PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-28-20_075_9210262797947282562/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-28-03_693_6030547893373702709/-mr-10000 POSTHOOK: query: FROM srcpart a RIGHT OUTER JOIN @@ -546,7 +546,7 @@ POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-28-20_075_9210262797947282562/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-28-03_693_6030547893373702709/-mr-10000 17 val_17 17 val_17 17 val_17 17 val_17 18 val_18 18 val_18 @@ -631,11 +631,11 @@ type: string Needs Tagging: true Path -> Alias: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src [a] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b] Path -> Partition: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -646,12 +646,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -662,16 +662,16 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -685,13 +685,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -702,17 +702,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -726,13 +726,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -743,13 +743,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -781,7 +781,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-28-26_295_2729772949874855026/-ext-10001 + directory: 
file:/tmp/jsichi/hive_2010-08-26_16-28-08_327_7225187921961050710/-ext-10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -806,10 +806,10 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08' PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Input: default@src -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-28-26_417_2072973793307150042/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-28-08_481_2074258607919607662/-mr-10000 POSTHOOK: query: FROM src a RIGHT OUTER JOIN @@ -818,10 +818,10 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08' POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Input: default@src -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-28-26_417_2072973793307150042/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-28-08_481_2074258607919607662/-mr-10000 17 val_17 17 val_17 17 val_17 17 val_17 18 val_18 18 val_18 @@ -906,13 +906,13 @@ type: string Needs Tagging: true Path -> Alias: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src [b] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [a] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [a] Path -> Partition: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -923,12 +923,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -939,16 +939,16 @@ columns.types string:string 
file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -962,13 +962,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -979,17 +979,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -1003,13 +1003,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1020,17 +1020,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat 
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -1044,13 +1044,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1061,17 +1061,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -1085,13 +1085,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1102,13 +1102,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat 
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -1140,7 +1140,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-28-32_265_4665772535283084323/-ext-10001 + directory: file:/tmp/jsichi/hive_2010-08-26_16-28-13_124_7639159324445029873/-ext-10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1170,7 +1170,7 @@ PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-28-32_393_6413563964244498371/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-28-13_297_1158788927411509004/-mr-10000 POSTHOOK: query: FROM srcpart a RIGHT OUTER JOIN @@ -1184,7 +1184,7 @@ POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-28-32_393_6413563964244498371/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-28-13_297_1158788927411509004/-mr-10000 17 val_17 17 val_17 17 val_17 17 val_17 18 val_18 18 val_18 Index: ql/src/test/results/clientpositive/archive.q.out =================================================================== --- ql/src/test/results/clientpositive/archive.q.out (revision 990244) +++ ql/src/test/results/clientpositive/archive.q.out (working copy) @@ -103,7 +103,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 -PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-41-57_236_7094587945850687486/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-18_976_1272829475610150800/-mr-10000 POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19) SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col @@ -111,7 +111,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 -POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-41-57_236_7094587945850687486/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-18_976_1272829475610150800/-mr-10000 POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] @@ -146,13 +146,13 @@ PREHOOK: type: QUERY PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 -PREHOOK: 
Output: file:/tmp/njain/hive_2010-08-17_13-42-05_285_507626451298228865/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-25_151_2967941869328137406/-mr-10000 POSTHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col FROM (SELECT * FROM tstsrcpart WHERE ds='2008-04-08') subq1) subq2 POSTHOOK: type: QUERY POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 -POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-05_285_507626451298228865/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-25_151_2967941869328137406/-mr-10000 POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] @@ -167,11 +167,11 @@ PREHOOK: query: SELECT key, count(1) FROM tstsrcpart WHERE ds='2008-04-08' AND hr='12' AND key='0' GROUP BY key PREHOOK: type: QUERY PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 -PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-10_049_6066385213850923753/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-28_548_4632264336389099950/-mr-10000 POSTHOOK: query: SELECT key, count(1) FROM tstsrcpart WHERE ds='2008-04-08' AND hr='12' AND key='0' GROUP BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 -POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-10_049_6066385213850923753/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-28_548_4632264336389099950/-mr-10000 POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] @@ -188,13 +188,13 @@ PREHOOK: type: QUERY PREHOOK: Input: default@tstsrc PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 -PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-13_594_7068481220028250163/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-31_792_4165036420533315437/-mr-10000 POSTHOOK: query: SELECT * FROM tstsrcpart a JOIN tstsrc b ON a.key=b.key WHERE a.ds='2008-04-08' AND a.hr='12' AND a.key='0' POSTHOOK: type: QUERY POSTHOOK: Input: default@tstsrc POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 -POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-13_594_7068481220028250163/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-31_792_4165036420533315437/-mr-10000 POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] @@ -237,13 +237,13 @@ PREHOOK: type: QUERY PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 -PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-18_911_7638636591676197332/-mr-10000 +PREHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-26_15-25-35_688_2955384568705499730/-mr-10000 POSTHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col FROM (SELECT * FROM tstsrcpart WHERE ds='2008-04-08') subq1) subq2 POSTHOOK: type: QUERY POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 -POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-18_911_7638636591676197332/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-35_688_2955384568705499730/-mr-10000 POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] @@ -296,11 +296,11 @@ PREHOOK: query: SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key PREHOOK: type: QUERY PREHOOK: Input: default@harbucket@ds=1 -PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-27_341_4171791869922370959/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-42_283_2054256979152422951/-mr-10000 POSTHOOK: query: SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@harbucket@ds=1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-27_341_4171791869922370959/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-42_283_2054256979152422951/-mr-10000 POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(tstsrc)tstsrc.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -340,11 +340,11 @@ PREHOOK: query: SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key PREHOOK: type: QUERY PREHOOK: Input: default@harbucket@ds=1 -PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-33_554_4982150859229878793/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-48_350_3116069503890468898/-mr-10000 POSTHOOK: query: SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@harbucket@ds=1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-33_554_4982150859229878793/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-48_350_3116069503890468898/-mr-10000 POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(tstsrc)tstsrc.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -384,11 +384,11 @@ PREHOOK: query: SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key PREHOOK: type: QUERY PREHOOK: Input: default@harbucket@ds=1 -PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-37_492_8334218564133786220/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-51_789_5787141881350623573/-mr-10000 POSTHOOK: query: SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@harbucket@ds=1 -POSTHOOK: Output: 
file:/tmp/njain/hive_2010-08-17_13-42-37_492_8334218564133786220/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-25-51_789_5787141881350623573/-mr-10000 POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(tstsrc)tstsrc.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -468,12 +468,12 @@ FROM (SELECT * FROM old_name WHERE ds='1') subq1) subq2 PREHOOK: type: QUERY PREHOOK: Input: default@old_name@ds=1 -PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-47_944_3509519100083434350/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-26-01_045_8209299115713838372/-mr-10000 POSTHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col FROM (SELECT * FROM old_name WHERE ds='1') subq1) subq2 POSTHOOK: type: QUERY POSTHOOK: Input: default@old_name@ds=1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-47_944_3509519100083434350/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-26-01_045_8209299115713838372/-mr-10000 POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(tstsrc)tstsrc.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: old_name PARTITION(ds=1).key EXPRESSION [(tstsrc)tstsrc.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] @@ -494,8 +494,8 @@ POSTHOOK: query: ALTER TABLE old_name RENAME TO new_name POSTHOOK: type: ALTERTABLE_RENAME POSTHOOK: Input: default@old_name +POSTHOOK: Output: default@new_name POSTHOOK: Output: default@old_name -POSTHOOK: Output: default@new_name POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(tstsrc)tstsrc.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: old_name PARTITION(ds=1).key EXPRESSION [(tstsrc)tstsrc.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] @@ -512,12 +512,12 @@ FROM (SELECT * FROM new_name WHERE ds='1') subq1) subq2 PREHOOK: type: QUERY PREHOOK: Input: default@new_name@ds=1 -PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-51_806_7196728345940154423/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-26-04_352_8867631871149339879/-mr-10000 POSTHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col FROM (SELECT * FROM new_name WHERE ds='1') subq1) subq2 POSTHOOK: type: QUERY POSTHOOK: Input: default@new_name@ds=1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_13-42-51_806_7196728345940154423/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-26-04_352_8867631871149339879/-mr-10000 POSTHOOK: Lineage: harbucket PARTITION(ds=1).key EXPRESSION [(tstsrc)tstsrc.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: old_name PARTITION(ds=1).key EXPRESSION [(tstsrc)tstsrc.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/lineage1.q.out_0.17 =================================================================== --- ql/src/test/results/clientpositive/lineage1.q.out_0.17 (revision 990244) +++ 
ql/src/test/results/clientpositive/lineage1.q.out_0.17 (working copy) @@ -101,7 +101,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-06-07_217_8924431315853651638/-mr-10002 + file:/tmp/jsichi/hive_2010-08-27_14-01-08_880_5138753499787161938/-mr-10002 Union Select Operator expressions: @@ -125,7 +125,7 @@ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_l1 - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-06-07_217_8924431315853651638/-mr-10004 + file:/tmp/jsichi/hive_2010-08-27_14-01-08_880_5138753499787161938/-mr-10004 Union Select Operator expressions: @@ -157,7 +157,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_13-06-07_217_8924431315853651638/-ext-10000 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_14-01-08_880_5138753499787161938/-ext-10000 Stage: Stage-0 Move Operator @@ -172,7 +172,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_13-06-07_217_8924431315853651638/-ext-10003 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_14-01-08_880_5138753499787161938/-ext-10003 Reduce Output Operator sort order: Map-reduce partition columns: @@ -264,8 +264,8 @@ LEFT OUTER JOIN src p2 ON (t2.key = p2.key)) j PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@src1 -PREHOOK: Input: default@src PREHOOK: Output: default@dest_l1 POSTHOOK: query: INSERT OVERWRITE TABLE dest_l1 SELECT j.* @@ -279,8 +279,8 @@ LEFT OUTER JOIN src p2 ON (t2.key = p2.key)) j POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 -POSTHOOK: Input: default@src POSTHOOK: Output: default@dest_l1 POSTHOOK: Lineage: dest_l1.key EXPRESSION [(src1)t1.FieldSchema(name:key, type:string, comment:default), (src1)t2.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_l1.value EXPRESSION [(src)p2.FieldSchema(name:value, type:string, comment:default), (src)p1.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/filter_join_breaktask2.q.out =================================================================== --- ql/src/test/results/clientpositive/filter_join_breaktask2.q.out (revision 990244) +++ ql/src/test/results/clientpositive/filter_join_breaktask2.q.out (working copy) @@ -253,11 +253,11 @@ PREHOOK: query: select * from T2 PREHOOK: type: QUERY PREHOOK: Input: default@t2@ds=2010-04-17 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-22-18_463_1385997241277658790/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-41-10_566_4433224025311049348/-mr-10000 POSTHOOK: query: select * from T2 POSTHOOK: type: QUERY POSTHOOK: Input: default@t2@ds=2010-04-17 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-22-18_463_1385997241277658790/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-41-10_566_4433224025311049348/-mr-10000 POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c1 SIMPLE [] POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c2 SIMPLE [] POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c3 SIMPLE [] @@ -382,11 +382,11 @@ PREHOOK: query: select * from T1 PREHOOK: type: QUERY PREHOOK: Input: 
default@t1@ds=2010-04-17 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-22-18_668_3928437893588448271/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-41-10_954_8596376016737090385/-mr-10000 POSTHOOK: query: select * from T1 POSTHOOK: type: QUERY POSTHOOK: Input: default@t1@ds=2010-04-17 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-22-18_668_3928437893588448271/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-41-10_954_8596376016737090385/-mr-10000 POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c1 SIMPLE [] POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c2 SIMPLE [] POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c3 SIMPLE [] @@ -511,11 +511,11 @@ PREHOOK: query: select * from T3 PREHOOK: type: QUERY PREHOOK: Input: default@t3@ds=2010-04-17 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-22-18_801_6024512080264702124/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-41-11_371_4261884027917519942/-mr-10000 POSTHOOK: query: select * from T3 POSTHOOK: type: QUERY POSTHOOK: Input: default@t3@ds=2010-04-17 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-22-18_801_6024512080264702124/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-41-11_371_4261884027917519942/-mr-10000 POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c1 SIMPLE [] POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c2 SIMPLE [] POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c3 SIMPLE [] @@ -640,11 +640,11 @@ PREHOOK: query: select * from T4 PREHOOK: type: QUERY PREHOOK: Input: default@t4@ds=2010-04-17 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-22-18_931_7314109222032805369/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-41-11_796_8203509595344384965/-mr-10000 POSTHOOK: query: select * from T4 POSTHOOK: type: QUERY POSTHOOK: Input: default@t4@ds=2010-04-17 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-22-18_931_7314109222032805369/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-41-11_796_8203509595344384965/-mr-10000 POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c1 SIMPLE [] POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c2 SIMPLE [] POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c3 SIMPLE [] @@ -774,11 +774,11 @@ JOIN T4 d ON (c.c0 = d.c0 AND c.ds='2010-04-17' AND d.ds='2010-04-17') PREHOOK: type: QUERY -PREHOOK: Input: default@t4@ds=2010-04-17 +PREHOOK: Input: default@t1@ds=2010-04-17 PREHOOK: Input: default@t2@ds=2010-04-17 PREHOOK: Input: default@t3@ds=2010-04-17 -PREHOOK: Input: default@t1@ds=2010-04-17 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-22-19_056_2171086064566020577/10000 +PREHOOK: Input: default@t4@ds=2010-04-17 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-41-12_248_6929768884094642056/-mr-10000 POSTHOOK: query: SELECT a.c1 as a_c1, b.c1 b_c1, d.c0 as d_c0 FROM T1 a JOIN T2 b ON (a.c1 = b.c1 AND a.ds='2010-04-17' AND b.ds='2010-04-17') @@ -787,11 +787,11 @@ JOIN T4 d ON (c.c0 = d.c0 AND c.ds='2010-04-17' AND d.ds='2010-04-17') POSTHOOK: type: QUERY -POSTHOOK: Input: default@t4@ds=2010-04-17 +POSTHOOK: Input: default@t1@ds=2010-04-17 POSTHOOK: Input: default@t2@ds=2010-04-17 POSTHOOK: Input: default@t3@ds=2010-04-17 -POSTHOOK: Input: default@t1@ds=2010-04-17 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-22-19_056_2171086064566020577/10000 +POSTHOOK: Input: default@t4@ds=2010-04-17 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-41-12_248_6929768884094642056/-mr-10000 POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c1 SIMPLE [] POSTHOOK: Lineage: t1 
PARTITION(ds=2010-04-17).c2 SIMPLE [] POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c3 SIMPLE [] Index: ql/src/test/results/clientpositive/bucketmapjoin6.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin6.q.out (revision 990244) +++ ql/src/test/results/clientpositive/bucketmapjoin6.q.out (working copy) @@ -43,15 +43,15 @@ select /*+ MAPJOIN(l) */ i.a, i.b, l.b from tmp1 i join tmp2 l ON i.a = l.a PREHOOK: type: QUERY +PREHOOK: Input: default@tmp1 PREHOOK: Input: default@tmp2 -PREHOOK: Input: default@tmp1 PREHOOK: Output: default@tmp3 POSTHOOK: query: insert overwrite table tmp3 select /*+ MAPJOIN(l) */ i.a, i.b, l.b from tmp1 i join tmp2 l ON i.a = l.a POSTHOOK: type: QUERY +POSTHOOK: Input: default@tmp1 POSTHOOK: Input: default@tmp2 -POSTHOOK: Input: default@tmp1 POSTHOOK: Output: default@tmp3 POSTHOOK: Lineage: tmp1.a SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tmp1.b SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -63,11 +63,11 @@ PREHOOK: query: select * from tmp3 order by a, b, c PREHOOK: type: QUERY PREHOOK: Input: default@tmp3 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-16-34_797_3291869263216721850/10000 +PREHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-18-51_917_5942757771764717347/-mr-10000 POSTHOOK: query: select * from tmp3 order by a, b, c POSTHOOK: type: QUERY POSTHOOK: Input: default@tmp3 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-16-34_797_3291869263216721850/10000 +POSTHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-18-51_917_5942757771764717347/-mr-10000 POSTHOOK: Lineage: tmp1.a SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tmp1.b SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: tmp2.a SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/smb_mapjoin_6.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_6.q.out (revision 990244) +++ ql/src/test/results/clientpositive/smb_mapjoin_6.q.out (working copy) @@ -127,14 +127,14 @@ PREHOOK: query: insert overwrite table smb_join_results select /*+mapjoin(a)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Output: default@smb_join_results POSTHOOK: query: insert overwrite table smb_join_results select /*+mapjoin(a)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@smb_join_results POSTHOOK: Lineage: smb_bucket4_1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -147,11 +147,11 @@ PREHOOK: query: select * from smb_join_results order by k1 PREHOOK: type: QUERY PREHOOK: Input: default@smb_join_results -PREHOOK: Output: 
file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-49-10_926_8745628360725409758/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-58_355_4241912694869470636/-mr-10000 POSTHOOK: query: select * from smb_join_results order by k1 POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_join_results -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-49-10_926_8745628360725409758/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-43-58_355_4241912694869470636/-mr-10000 POSTHOOK: Lineage: smb_bucket4_1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: smb_bucket4_2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] @@ -1190,13 +1190,13 @@ 498 val_498 498 val_498 PREHOOK: query: insert overwrite table normal_join_results select * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Output: default@normal_join_results POSTHOOK: query: insert overwrite table normal_join_results select * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@normal_join_results POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.k2 SIMPLE [(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -1213,11 +1213,11 @@ PREHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from normal_join_results PREHOOK: type: QUERY PREHOOK: Input: default@normal_join_results -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-49-23_089_8916105885194180283/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-44-07_386_5215362180353440890/-mr-10000 POSTHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from normal_join_results POSTHOOK: type: QUERY POSTHOOK: Input: default@normal_join_results -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-49-23_089_8916105885194180283/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-44-07_386_5215362180353440890/-mr-10000 POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.k2 SIMPLE [(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.v1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:from deserializer), ] @@ -1234,11 +1234,11 @@ PREHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results PREHOOK: type: QUERY PREHOOK: Input: default@smb_join_results -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-49-27_271_4277665023453991774/-mr-10000 +PREHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-26_16-44-10_503_5044933337669213788/-mr-10000 POSTHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_join_results -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-49-27_271_4277665023453991774/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-44-10_503_5044933337669213788/-mr-10000 POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.k2 SIMPLE [(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.v1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:from deserializer), ] @@ -1343,14 +1343,14 @@ PREHOOK: query: insert overwrite table smb_join_results select /*+mapjoin(b)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Output: default@smb_join_results POSTHOOK: query: insert overwrite table smb_join_results select /*+mapjoin(b)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@smb_join_results POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.k2 SIMPLE [(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -1371,14 +1371,14 @@ PREHOOK: query: insert overwrite table smb_join_results select /*+mapjoin(a)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Output: default@smb_join_results POSTHOOK: query: insert overwrite table smb_join_results select /*+mapjoin(a)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@smb_join_results POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.k2 SIMPLE [(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -1403,11 +1403,11 @@ PREHOOK: query: select * from smb_join_results order by k1 PREHOOK: type: QUERY PREHOOK: Input: default@smb_join_results -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-49-42_542_6844851430917483539/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-44-21_789_8349972338199515523/-mr-10000 POSTHOOK: query: select * from smb_join_results order by k1 POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_join_results -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-49-42_542_6844851430917483539/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-44-21_789_8349972338199515523/-mr-10000 POSTHOOK: Lineage: normal_join_results.k1 SIMPLE 
[(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.k2 SIMPLE [(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.v1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:from deserializer), ] @@ -2458,13 +2458,13 @@ 498 val_498 498 val_498 PREHOOK: query: insert overwrite table normal_join_results select * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Output: default@normal_join_results POSTHOOK: query: insert overwrite table normal_join_results select * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@normal_join_results POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -2493,11 +2493,11 @@ PREHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from normal_join_results PREHOOK: type: QUERY PREHOOK: Input: default@normal_join_results -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-49-54_759_7739155121976805350/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-44-30_839_4771778288932582310/-mr-10000 POSTHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from normal_join_results POSTHOOK: type: QUERY POSTHOOK: Input: default@normal_join_results -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-49-54_759_7739155121976805350/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-44-30_839_4771778288932582310/-mr-10000 POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.k2 SIMPLE [(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -2526,11 +2526,11 @@ PREHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results PREHOOK: type: QUERY PREHOOK: Input: default@smb_join_results -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-49-59_435_4638146387519059756/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-44-33_938_44080156296442227/-mr-10000 POSTHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_join_results -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-49-59_435_4638146387519059756/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-44-33_938_44080156296442227/-mr-10000 POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from 
deserializer), ] POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.k2 SIMPLE [(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -2663,14 +2663,14 @@ PREHOOK: query: insert overwrite table smb_join_results select /*+mapjoin(a)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key where a.key>1000 PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Output: default@smb_join_results POSTHOOK: query: insert overwrite table smb_join_results select /*+mapjoin(a)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key where a.key>1000 POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@smb_join_results POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -2815,14 +2815,14 @@ PREHOOK: query: insert overwrite table smb_join_results select /*+mapjoin(b)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key where a.key>1000 PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Output: default@smb_join_results POSTHOOK: query: insert overwrite table smb_join_results select /*+mapjoin(b)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key where a.key>1000 POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@smb_join_results POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -2975,14 +2975,14 @@ PREHOOK: query: select /*+mapjoin(b,c)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key join smb_bucket4_2 c on b.key = c.key where a.key>1000 PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-50-18_801_6427179719209140669/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-44-46_257_446569303031962295/-mr-10000 POSTHOOK: query: select /*+mapjoin(b,c)*/ * from smb_bucket4_1 a join smb_bucket4_2 b on a.key = b.key join smb_bucket4_2 c on b.key = c.key where a.key>1000 POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-50-18_801_6427179719209140669/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-44-46_257_446569303031962295/-mr-10000 POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:from deserializer), ] 
POSTHOOK: Lineage: normal_join_results.k2 SIMPLE [(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:from deserializer), ] Index: ql/src/test/results/clientpositive/input39.q.out =================================================================== --- ql/src/test/results/clientpositive/input39.q.out (revision 990244) +++ ql/src/test/results/clientpositive/input39.q.out (working copy) @@ -150,7 +150,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_12-48-18_227_47797524032104088/-mr-10002 + file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-52-45_661_2147720856532036723/-mr-10002 Reduce Output Operator sort order: tag: -1 @@ -183,14 +183,14 @@ PREHOOK: query: select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1' PREHOOK: type: QUERY +PREHOOK: Input: default@t1@ds=1 PREHOOK: Input: default@t2@ds=1 -PREHOOK: Input: default@t1@ds=1 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_12-48-18_383_386196245099968730/-mr-10000 +PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-52-45_820_1879500705005233044/-mr-10000 POSTHOOK: query: select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1' POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1@ds=1 POSTHOOK: Input: default@t2@ds=1 -POSTHOOK: Input: default@t1@ds=1 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_12-48-18_383_386196245099968730/-mr-10000 +POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-52-45_820_1879500705005233044/-mr-10000 POSTHOOK: Lineage: t1 PARTITION(ds=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: t1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: t1 PARTITION(ds=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/bucketmapjoin1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin1.q.out (revision 990244) +++ ql/src/test/results/clientpositive/bucketmapjoin1.q.out (working copy) @@ -137,7 +137,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-02-41_234_3337660673351846553/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-27-45_443_5246531592135492584/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -148,12 +148,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244561 + transient_lastDdlTime 1282861665 serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -213,7 +213,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-02-41_234_3337660673351846553/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-27-45_443_5246531592135492584/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -224,12 +224,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244561 + transient_lastDdlTime 1282861665 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -238,15 +238,15 @@ Alias Bucket Base File Name Mapping: b {srcbucket20.txt=[srcbucket20.txt, srcbucket22.txt], srcbucket21.txt=[srcbucket21.txt, srcbucket23.txt]} Alias Bucket File Name Mapping: - b {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]} + b {pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 Needs Tagging: false Path -> Alias: - 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [a] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -258,12 +258,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244556 + transient_lastDdlTime 1282861660 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -275,12 +275,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244556 + transient_lastDdlTime 1282861660 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -292,14 +292,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-02-41_234_3337660673351846553/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-02-41_234_3337660673351846553/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-27-45_443_5246531592135492584/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-27-45_443_5246531592135492584/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-02-41_234_3337660673351846553/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-27-45_443_5246531592135492584/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -309,24 +309,24 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, 
string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244561 + transient_lastDdlTime 1282861665 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-02-41_234_3337660673351846553/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-27-45_443_5246531592135492584/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-02-41_234_3337660673351846553/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-27-45_443_5246531592135492584/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-02-41_234_3337660673351846553/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-27-45_443_5246531592135492584/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -337,21 +337,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244561 + transient_lastDdlTime 1282861665 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-02-41_234_3337660673351846553/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-02-41_234_3337660673351846553/-ext-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-27-45_443_5246531592135492584/-ext-10002 [pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-27-45_443_5246531592135492584/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-02-41_234_3337660673351846553/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-27-45_443_5246531592135492584/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -362,12 +362,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 
1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244561 + transient_lastDdlTime 1282861665 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -378,12 +378,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244561 + transient_lastDdlTime 1282861665 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -394,16 +394,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(b)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] @@ -411,11 +411,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-02-52_873_5262421080671458052/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-27-53_775_1430223565708098665/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-02-52_873_5262421080671458052/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-27-53_775_1430223565708098665/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ] @@ -441,16 +441,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table 
bucketmapjoin_tmp_result select /*+mapjoin(b)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -464,11 +464,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-03-10_734_2679038190670849271/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-28-07_896_8980413570460942198/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-03-10_734_2679038190670849271/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-28-07_896_8980413570460942198/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -505,16 +505,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-03-18_327_8125728407371831204/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-28-14_304_6082586747306863372/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-03-18_327_8125728407371831204/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-28-14_304_6082586747306863372/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -614,7 +614,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-03-23_031_48721243690597607/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-28-18_728_7919250035608448387/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -625,12 +625,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244590 + transient_lastDdlTime 1282861687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -685,7 +685,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-03-23_031_48721243690597607/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-28-18_728_7919250035608448387/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -696,12 +696,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244590 + transient_lastDdlTime 1282861687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -710,17 +710,17 @@ Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt], srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -734,13 +734,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244556 + transient_lastDdlTime 1282861661 serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -752,13 +752,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244556 + transient_lastDdlTime 1282861661 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part name: srcbucket_mapjoin_part @@ -770,14 +770,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-03-23_031_48721243690597607/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-03-23_031_48721243690597607/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-28-18_728_7919250035608448387/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-28-18_728_7919250035608448387/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-03-23_031_48721243690597607/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-28-18_728_7919250035608448387/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -787,24 +787,24 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244590 + transient_lastDdlTime 1282861687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-03-23_031_48721243690597607/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-28-18_728_7919250035608448387/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-03-23_031_48721243690597607/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-28-18_728_7919250035608448387/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-03-23_031_48721243690597607/-ext-10000 + directory: 
pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-28-18_728_7919250035608448387/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -815,21 +815,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244590 + transient_lastDdlTime 1282861687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-03-23_031_48721243690597607/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-03-23_031_48721243690597607/-ext-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-28-18_728_7919250035608448387/-ext-10002 [pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-28-18_728_7919250035608448387/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-03-23_031_48721243690597607/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-28-18_728_7919250035608448387/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -840,12 +840,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244590 + transient_lastDdlTime 1282861687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -856,12 +856,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244590 + transient_lastDdlTime 1282861687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -872,16 
+872,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -901,11 +901,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-03-34_456_386906163891339513/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-28-27_929_4309708501631568542/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-03-34_456_386906163891339513/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-28-27_929_4309708501631568542/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -955,16 +955,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] @@ -990,11 +990,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: 
file:/tmp/nzhang/hive_2010-08-19_12-03-51_754_7367321468145787086/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-28-43_910_1958177214587769286/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-03-51_754_7367321468145787086/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-28-43_910_1958177214587769286/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -1055,16 +1055,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-04-00_459_1797831193771362477/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-28-50_247_5816889851285149323/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-04-00_459_1797831193771362477/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-28-50_247_5816889851285149323/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/smb_mapjoin_1.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_1.q.out (revision 990244) +++ ql/src/test/results/clientpositive/smb_mapjoin_1.q.out (working copy) @@ -96,14 +96,14 @@ PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a join smb_bucket_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-25_514_1522228888338492810/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-37_401_1624871895926870177/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a join smb_bucket_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Input: default@smb_bucket_1 
-POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-25_514_1522228888338492810/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-37_401_1624871895926870177/-mr-10000 PREHOOK: query: explain select /*+mapjoin(a)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key PREHOOK: type: QUERY @@ -172,14 +172,14 @@ PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-29_836_7003200343993374451/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-40_840_7571037129459555610/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-29_836_7003200343993374451/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-40_840_7571037129459555610/-mr-10000 1 val_1 NULL NULL 3 val_3 NULL NULL 4 val_4 NULL NULL @@ -253,14 +253,14 @@ PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-34_164_3075844906078088691/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-44_246_8651570096717420731/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-34_164_3075844906078088691/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-44_246_8651570096717420731/-mr-10000 NULL NULL 20 val_20 NULL NULL 23 val_23 NULL NULL 25 val_25 @@ -333,14 +333,14 @@ PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-38_566_6581323232434123318/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-47_643_7636568753652413332/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-38_566_6581323232434123318/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-47_643_7636568753652413332/-mr-10000 1 val_1 NULL NULL 3 val_3 NULL NULL 4 val_4 NULL NULL @@ -418,14 +418,14 @@ PREHOOK: 
query: select /*+mapjoin(b)*/ * from smb_bucket_1 a join smb_bucket_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-43_501_3496508926661952381/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-50_981_5137008327464965093/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a join smb_bucket_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-43_501_3496508926661952381/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-50_981_5137008327464965093/-mr-10000 PREHOOK: query: explain select /*+mapjoin(b)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key PREHOOK: type: QUERY @@ -494,14 +494,14 @@ PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-47_669_5185705008991050470/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-54_392_4676779510423673308/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-47_669_5185705008991050470/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-54_392_4676779510423673308/-mr-10000 1 val_1 NULL NULL 3 val_3 NULL NULL 4 val_4 NULL NULL @@ -575,14 +575,14 @@ PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-51_748_7678694225223867435/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-57_736_3881625090699387310/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-51_748_7678694225223867435/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-57_736_3881625090699387310/-mr-10000 NULL NULL 20 val_20 NULL NULL 23 val_23 NULL NULL 25 val_25 @@ -655,14 +655,14 @@ PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: 
file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-56_103_5715275651117467212/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-01_128_5842257160386435543/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-56_103_5715275651117467212/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-01_128_5842257160386435543/-mr-10000 1 val_1 NULL NULL 3 val_3 NULL NULL 4 val_4 NULL NULL Index: ql/src/test/results/clientpositive/join_rc.q.out =================================================================== --- ql/src/test/results/clientpositive/join_rc.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join_rc.q.out (working copy) @@ -114,15 +114,15 @@ PREHOOK: query: select join_rc1.key, join_rc2.value FROM join_rc1 JOIN join_rc2 ON join_rc1.key = join_rc2.key PREHOOK: type: QUERY +PREHOOK: Input: default@join_rc1 PREHOOK: Input: default@join_rc2 -PREHOOK: Input: default@join_rc1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-02-49_005_2982007273012088026/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-48_562_272409638263559506/-mr-10000 POSTHOOK: query: select join_rc1.key, join_rc2.value FROM join_rc1 JOIN join_rc2 ON join_rc1.key = join_rc2.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@join_rc1 POSTHOOK: Input: default@join_rc2 -POSTHOOK: Input: default@join_rc1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-02-49_005_2982007273012088026/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-48_562_272409638263559506/-mr-10000 POSTHOOK: Lineage: join_rc1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: join_rc1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: join_rc2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/bucketmapjoin3.q.out_0.17 =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin3.q.out_0.17 (revision 990244) +++ ql/src/test/results/clientpositive/bucketmapjoin3.q.out_0.17 (working copy) @@ -140,7 +140,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-02_860_5301515997300427671/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-08_428_5895841844554846760/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -151,12 +151,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, 
string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351922 + transient_lastDdlTime 1282940468 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -214,7 +214,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-02_860_5301515997300427671/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-08_428_5895841844554846760/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -225,12 +225,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351922 + transient_lastDdlTime 1282940468 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -239,15 +239,15 @@ Alias Bucket Base File Name Mapping: b {srcbucket22.txt=[srcbucket20.txt, srcbucket22.txt], srcbucket23.txt=[srcbucket21.txt, srcbucket23.txt]} Alias Bucket File Name Mapping: - b {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]} + b {pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 - 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [a] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [a] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -261,13 +261,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351921 + transient_lastDdlTime 1282940466 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -279,13 +279,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351921 + transient_lastDdlTime 1282940466 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part_2 name: srcbucket_mapjoin_part_2 @@ -297,14 +297,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-02_860_5301515997300427671/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-02_860_5301515997300427671/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-08_428_5895841844554846760/-ext-10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-08_428_5895841844554846760/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-02_860_5301515997300427671/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-08_428_5895841844554846760/-ext-10000 
table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -314,20 +314,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351922 + transient_lastDdlTime 1282940468 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-02_860_5301515997300427671/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-08_428_5895841844554846760/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-02_860_5301515997300427671/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-08_428_5895841844554846760/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -343,9 +343,9 @@ type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-02_860_5301515997300427671/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-02_860_5301515997300427671/-ext-10002] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-08_428_5895841844554846760/-ext-10002 [pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-08_428_5895841844554846760/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-02_860_5301515997300427671/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-08_428_5895841844554846760/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -356,12 +356,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351922 + transient_lastDdlTime 1282940468 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -372,12 +372,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351922 + transient_lastDdlTime 1282940468 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -386,7 +386,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-02_860_5301515997300427671/-ext-10000 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-08_428_5895841844554846760/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -397,12 +397,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351922 + transient_lastDdlTime 1282940468 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -431,11 +431,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-52-14_585_3206495313015062115/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-21-18_101_2503635838902230563/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-52-14_585_3206495313015062115/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-21-18_101_2503635838902230563/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin_part_2)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ] @@ -484,11 +484,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-52-32_919_4852266720815719336/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-21-33_708_6713438406371908224/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-52-32_919_4852266720815719336/-mr-10000 +POSTHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-27_13-21-33_708_6713438406371908224/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -525,16 +525,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-52-41_332_46957872982058641/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-21-40_391_5136557064866380488/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-52-41_332_46957872982058641/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-21-40_391_5136557064866380488/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -632,7 +632,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-46_328_92745087603790285/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-44_043_2208309954683066173/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -643,12 +643,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351952 + transient_lastDdlTime 1282940493 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -706,7 +706,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-46_328_92745087603790285/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-44_043_2208309954683066173/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -717,12 +717,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351952 + transient_lastDdlTime 1282940493 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -731,17 +731,17 @@ Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket22.txt], srcbucket21.txt=[srcbucket23.txt], srcbucket22.txt=[srcbucket22.txt], srcbucket23.txt=[srcbucket23.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]} + a {pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]} Alias Bucket Output File Name Mapping: - 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -755,13 +755,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351918 + transient_lastDdlTime 1282940464 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -773,13 +773,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351918 + transient_lastDdlTime 1282940464 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part name: srcbucket_mapjoin_part @@ -791,14 +791,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-46_328_92745087603790285/-ext-10002 - destination: 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-46_328_92745087603790285/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-44_043_2208309954683066173/-ext-10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-44_043_2208309954683066173/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-46_328_92745087603790285/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-44_043_2208309954683066173/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -808,20 +808,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351952 + transient_lastDdlTime 1282940493 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-46_328_92745087603790285/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-44_043_2208309954683066173/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-46_328_92745087603790285/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-44_043_2208309954683066173/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -837,9 +837,9 @@ type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-46_328_92745087603790285/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-46_328_92745087603790285/-ext-10002] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-44_043_2208309954683066173/-ext-10002 [pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-44_043_2208309954683066173/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-46_328_92745087603790285/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-44_043_2208309954683066173/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -850,12 +850,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351952 + transient_lastDdlTime 1282940493 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -866,12 +866,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351952 + transient_lastDdlTime 1282940493 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -880,7 +880,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-52-46_328_92745087603790285/-ext-10000 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-21-44_043_2208309954683066173/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -891,12 +891,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351952 + transient_lastDdlTime 1282940493 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -937,11 +937,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-52-58_736_8462445101349885253/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-21-54_369_3834715752895970116/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-52-58_736_8462445101349885253/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-21-54_369_3834715752895970116/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] 
POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -1026,11 +1026,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-53-19_776_1081750748779838060/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-22-11_829_429037448585234121/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-53-19_776_1081750748779838060/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-22-11_829_429037448585234121/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -1091,16 +1091,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-53-28_109_3659787088225214235/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-22-18_524_6184888372779522742/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-53-28_109_3659787088225214235/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-22-18_524_6184888372779522742/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/louter_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/louter_join_ppr.q.out (revision 990244) +++ ql/src/test/results/clientpositive/louter_join_ppr.q.out (working copy) @@ -78,11 +78,11 @@ type: string Needs Tagging: true Path -> Alias: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src [a] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b] - 
pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b] Path -> Partition: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -93,12 +93,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -109,16 +109,16 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -132,13 +132,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -149,17 +149,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string 
value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -173,13 +173,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -190,13 +190,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -228,7 +228,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-11-33_607_3608648151043953882/-ext-10001 + directory: file:/tmp/jsichi/hive_2010-08-26_16-13-53_609_8727040543187359730/-ext-10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -253,10 +253,10 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Input: default@src -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-11-33_811_5068391106719281565/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-13-53_763_8356433223655473381/-mr-10000 POSTHOOK: query: FROM src a LEFT OUTER JOIN @@ -265,10 +265,10 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Input: default@src -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-11-33_811_5068391106719281565/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-13-53_763_8356433223655473381/-mr-10000 17 
val_17 17 val_17 17 val_17 17 val_17 18 val_18 18 val_18 @@ -356,11 +356,11 @@ type: string Needs Tagging: true Path -> Alias: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src [b] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a] Path -> Partition: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -371,12 +371,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -387,16 +387,16 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -410,13 +410,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -427,17 +427,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -451,13 +451,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -468,13 +468,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -506,7 +506,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-11-39_060_2254270640399868957/-ext-10001 + directory: file:/tmp/jsichi/hive_2010-08-26_16-13-58_421_1926636805796976144/-ext-10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -534,7 +534,7 @@ PREHOOK: Input: default@src PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-11-39_184_5528117984529953882/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-13-58_580_4139293399371641463/-mr-10000 POSTHOOK: query: FROM srcpart a LEFT OUTER JOIN @@ -546,7 +546,7 @@ POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-11-39_184_5528117984529953882/-mr-10000 +POSTHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-26_16-13-58_580_4139293399371641463/-mr-10000 17 val_17 17 val_17 17 val_17 17 val_17 18 val_18 18 val_18 @@ -631,13 +631,13 @@ type: string Needs Tagging: true Path -> Alias: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src [a] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [b] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [b] Path -> Partition: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -648,12 +648,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -664,16 +664,16 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -687,13 +687,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location 
pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -704,17 +704,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -728,13 +728,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -745,17 +745,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -769,13 +769,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location 
pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -786,17 +786,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -810,13 +810,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -827,13 +827,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -865,7 +865,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-11-45_337_3700166986367295400/-ext-10001 + directory: file:/tmp/jsichi/hive_2010-08-26_16-14-03_203_9134886317608012397/-ext-10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -890,12 +890,12 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08' PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: 
default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -PREHOOK: Input: default@src -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-11-45_489_4299540835676903876/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-14-03_392_1161117122027170023/-mr-10000 POSTHOOK: query: FROM src a LEFT OUTER JOIN @@ -904,12 +904,12 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08' POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -POSTHOOK: Input: default@src -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-11-45_489_4299540835676903876/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-14-03_392_1161117122027170023/-mr-10000 17 val_17 17 val_17 17 val_17 17 val_17 18 val_18 18 val_18 @@ -994,11 +994,11 @@ type: string Needs Tagging: true Path -> Alias: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src [b] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a] Path -> Partition: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -1009,12 +1009,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1025,16 +1025,16 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe 
name: src name: src - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -1048,13 +1048,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1065,17 +1065,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -1089,13 +1089,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1106,13 +1106,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: 
srcpart @@ -1144,7 +1144,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-11-53_330_6615969232574084571/-ext-10001 + directory: file:/tmp/jsichi/hive_2010-08-26_16-14-08_638_7208426036779412420/-ext-10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1172,7 +1172,7 @@ PREHOOK: Input: default@src PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-11-53_462_1954770934755044899/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-14-08_790_7032133874332300547/-mr-10000 POSTHOOK: query: FROM srcpart a LEFT OUTER JOIN @@ -1184,7 +1184,7 @@ POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-11-53_462_1954770934755044899/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-14-08_790_7032133874332300547/-mr-10000 17 val_17 17 val_17 17 val_17 17 val_17 18 val_18 18 val_18 Index: ql/src/test/results/clientpositive/join_reorder.q.out =================================================================== --- ql/src/test/results/clientpositive/join_reorder.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join_reorder.q.out (working copy) @@ -191,13 +191,13 @@ PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-02-55_951_7421942426050978131/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-55_909_642870158604473859/-mr-10000 POSTHOOK: query: FROM T1 a JOIN src c ON c.key+1=a.key SELECT a.key, a.val, c.key POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-02-55_951_7421942426050978131/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-55_909_642870158604473859/-mr-10000 1 11 0 1 11 0 1 11 0 @@ -207,13 +207,13 @@ PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-03-00_878_7390588821971448332/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-59_174_1281139540816451802/-mr-10000 POSTHOOK: query: FROM T1 a JOIN src c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ a.key, a.val, c.key POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-03-00_878_7390588821971448332/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-05-59_174_1281139540816451802/-mr-10000 1 11 0 1 11 0 1 11 0 @@ -497,19 +497,19 @@ RIGHT OUTER JOIN T3 c ON (c.val = a.val) SELECT a.key, b.key, a.val, c.val PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-03-07_262_9113480534666709881/-mr-10000 +PREHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-26_16-06-02_681_8146804917511289759/-mr-10000 POSTHOOK: query: FROM T1 a LEFT OUTER JOIN T2 b ON (b.key=a.key) RIGHT OUTER JOIN T3 c ON (c.val = a.val) SELECT a.key, b.key, a.val, c.val POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-03-07_262_9113480534666709881/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-02_681_8146804917511289759/-mr-10000 2 2 12 12 NULL NULL NULL 14 NULL NULL NULL 16 @@ -519,19 +519,19 @@ RIGHT OUTER JOIN T3 c ON (c.val = a.val) SELECT /*+ STREAMTABLE(a) */ a.key, b.key, a.val, c.val PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-03-15_685_7005957441622210859/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-09_002_2308842085626596627/-mr-10000 POSTHOOK: query: FROM T1 a LEFT OUTER JOIN T2 b ON (b.key=a.key) RIGHT OUTER JOIN T3 c ON (c.val = a.val) SELECT /*+ STREAMTABLE(a) */ a.key, b.key, a.val, c.val POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-03-15_685_7005957441622210859/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-09_002_2308842085626596627/-mr-10000 2 2 12 12 NULL NULL NULL 14 NULL NULL NULL 16 @@ -766,20 +766,20 @@ PRESERVE T3 c (c.key, c.val) SELECT a.key, b.key, c.key PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-03-24_689_644851301802156755/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-15_440_3508804411194375153/-mr-10000 POSTHOOK: query: FROM UNIQUEJOIN PRESERVE T1 a (a.key, a.val), PRESERVE T2 b (b.key, b.val), PRESERVE T3 c (c.key, c.val) SELECT a.key, b.key, c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-03-24_689_644851301802156755/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-15_440_3508804411194375153/-mr-10000 1 NULL NULL 2 NULL 2 NULL 2 NULL @@ -797,20 +797,20 @@ PRESERVE T3 c (c.key, c.val) SELECT /*+ STREAMTABLE(b) */ a.key, b.key, c.key PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-03-30_651_4057662598234428262/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-20_053_2355797831193656267/-mr-10000 POSTHOOK: query: FROM UNIQUEJOIN PRESERVE T1 a (a.key, a.val), PRESERVE T2 b (b.key, b.val), PRESERVE T3 c (c.key, c.val) SELECT /*+ STREAMTABLE(b) */ a.key, b.key, c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: 
file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-03-30_651_4057662598234428262/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-20_053_2355797831193656267/-mr-10000 1 NULL NULL 2 NULL 2 NULL 2 NULL Index: ql/src/test/results/clientpositive/lineage1.q.out =================================================================== --- ql/src/test/results/clientpositive/lineage1.q.out (revision 990244) +++ ql/src/test/results/clientpositive/lineage1.q.out (working copy) @@ -101,7 +101,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/nzhang/hive_2010-08-17_22-55-14_712_8459267361465490341/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-08-08_217_4989832071586172776/-mr-10002 Union Select Operator expressions: @@ -125,7 +125,7 @@ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_l1 - file:/tmp/nzhang/hive_2010-08-17_22-55-14_712_8459267361465490341/-mr-10004 + file:/tmp/jsichi/hive_2010-08-26_16-08-08_217_4989832071586172776/-mr-10004 Union Select Operator expressions: @@ -157,7 +157,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-55-14_712_8459267361465490341/-ext-10000 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-08-08_217_4989832071586172776/-ext-10000 Stage: Stage-0 Move Operator @@ -172,7 +172,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-55-14_712_8459267361465490341/-ext-10003 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-08-08_217_4989832071586172776/-ext-10003 File Output Operator compressed: false GlobalTableId: 0 @@ -251,8 +251,8 @@ LEFT OUTER JOIN src p2 ON (t2.key = p2.key)) j PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@src1 -PREHOOK: Input: default@src PREHOOK: Output: default@dest_l1 POSTHOOK: query: INSERT OVERWRITE TABLE dest_l1 SELECT j.* @@ -266,8 +266,8 @@ LEFT OUTER JOIN src p2 ON (t2.key = p2.key)) j POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 -POSTHOOK: Input: default@src POSTHOOK: Output: default@dest_l1 POSTHOOK: Lineage: dest_l1.key EXPRESSION [(src1)t1.FieldSchema(name:key, type:string, comment:default), (src1)t2.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_l1.value EXPRESSION [(src)p2.FieldSchema(name:value, type:string, comment:default), (src)p1.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join36.q.out_0.17 =================================================================== --- ql/src/test/results/clientpositive/join36.q.out_0.17 (revision 990244) +++ ql/src/test/results/clientpositive/join36.q.out_0.17 (working copy) @@ -164,7 +164,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_12-59-14_616_4425396855967291614/-ext-10000 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-53-04_744_2069012305000904704/-ext-10000 Stage: Stage-0 Move Operator @@ -179,7 +179,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - 
pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_12-59-14_616_4425396855967291614/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-53-04_744_2069012305000904704/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -209,15 +209,15 @@ SELECT /*+ MAPJOIN(x) */ x.key, x.cnt, y.cnt FROM tmp1 x JOIN tmp2 y ON (x.key = y.key) PREHOOK: type: QUERY +PREHOOK: Input: default@tmp1 PREHOOK: Input: default@tmp2 -PREHOOK: Input: default@tmp1 PREHOOK: Output: default@dest_j1 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(x) */ x.key, x.cnt, y.cnt FROM tmp1 x JOIN tmp2 y ON (x.key = y.key) POSTHOOK: type: QUERY +POSTHOOK: Input: default@tmp1 POSTHOOK: Input: default@tmp2 -POSTHOOK: Input: default@tmp1 POSTHOOK: Output: default@dest_j1 POSTHOOK: Lineage: dest_j1.key SIMPLE [(tmp1)x.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(tmp2)y.FieldSchema(name:cnt, type:int, comment:null), ] @@ -229,11 +229,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_12-59-18_888_3224355428368888304/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-53-08_462_2303969710395749873/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_12-59-18_888_3224355428368888304/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-53-08_462_2303969710395749873/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(tmp1)x.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(tmp2)y.FieldSchema(name:cnt, type:int, comment:null), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(tmp1)x.FieldSchema(name:cnt, type:int, comment:null), ] Index: ql/src/test/results/clientpositive/join26.q.out =================================================================== --- ql/src/test/results/clientpositive/join26.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join26.q.out (working copy) @@ -83,7 +83,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-00_818_2637585125141547106/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-58-32_296_8733880232228770886/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -94,12 +94,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110840 + transient_lastDdlTime 1282863512 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -153,7 +153,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-00_818_2637585125141547106/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-58-32_296_8733880232228770886/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -164,12 +164,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110840 + transient_lastDdlTime 1282863512 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -213,7 +213,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-00_818_2637585125141547106/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-58-32_296_8733880232228770886/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -224,21 +224,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110840 + transient_lastDdlTime 1282863512 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -252,13 +252,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110625 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -269,13 +269,13 @@ 
columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110625 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -287,14 +287,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-00_818_2637585125141547106/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-00_818_2637585125141547106/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-58-32_296_8733880232228770886/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-58-32_296_8733880232228770886/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-00_818_2637585125141547106/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-58-32_296_8733880232228770886/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -304,24 +304,24 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110840 + transient_lastDdlTime 1282863512 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-00_818_2637585125141547106/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-58-32_296_8733880232228770886/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-00_818_2637585125141547106/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-58-32_296_8733880232228770886/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-00_818_2637585125141547106/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-58-32_296_8733880232228770886/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -332,21 +332,21 @@ columns.types string:string:string file.inputformat 
org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110840 + transient_lastDdlTime 1282863512 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-00_818_2637585125141547106/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-00_818_2637585125141547106/-ext-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-58-32_296_8733880232228770886/-ext-10002 [pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-58-32_296_8733880232228770886/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-00_818_2637585125141547106/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-58-32_296_8733880232228770886/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -357,12 +357,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110840 + transient_lastDdlTime 1282863512 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -373,12 +373,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110840 + transient_lastDdlTime 1282863512 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -389,18 +389,18 @@ FROM src1 x JOIN src y ON (x.key = y.key) JOIN srcpart z ON (x.key = z.key and z.ds='2008-04-08' and z.hr=11) PREHOOK: type: QUERY -PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@src PREHOOK: Input: default@src1 +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Output: default@dest_j1 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value FROM src1 x JOIN src y ON (x.key = y.key) JOIN srcpart z ON (x.key = z.key and z.ds='2008-04-08' and 
z.hr=11) POSTHOOK: type: QUERY -POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Output: default@dest_j1 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] @@ -408,11 +408,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-54-06_221_7015198661017442674/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-58-37_050_6374384295174848949/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-54-06_221_7015198661017442674/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-58-37_050_6374384295174848949/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/input3.q.out =================================================================== --- ql/src/test/results/clientpositive/input3.q.out (revision 990244) +++ ql/src/test/results/clientpositive/input3.q.out (working copy) @@ -100,8 +100,8 @@ POSTHOOK: query: ALTER TABLE TEST3b RENAME TO TEST3c POSTHOOK: type: ALTERTABLE_RENAME POSTHOOK: Input: default@test3b +POSTHOOK: Output: default@TEST3c POSTHOOK: Output: default@test3b -POSTHOOK: Output: default@TEST3c PREHOOK: query: DESCRIBE TEST3c PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE TEST3c @@ -160,4 +160,4 @@ r1 int r2 double -Detailed Table Information Table(tableName:test3c, dbName:default, owner:njain, createTime:1282028239, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:r1, type:int, comment:null), FieldSchema(name:r2, type:double, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/test3c, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=njain, last_modified_time=1282028240, transient_lastDdlTime=1282028240}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +Detailed Table Information Table(tableName:test3c, dbName:default, owner:jsichi, createTime:1282863111, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:r1, type:int, comment:null), FieldSchema(name:r2, type:double, comment:null)], location:pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/test3c, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], 
sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=jsichi, last_modified_time=1282863112, transient_lastDdlTime=1282863112}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) Index: ql/src/test/results/clientpositive/smb_mapjoin_7.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_7.q.out (revision 990244) +++ ql/src/test/results/clientpositive/smb_mapjoin_7.q.out (working copy) @@ -48,14 +48,14 @@ PREHOOK: query: insert overwrite table smb_join_results_empty_bigtable select /*+mapjoin(b)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Output: default@smb_join_results_empty_bigtable POSTHOOK: query: insert overwrite table smb_join_results_empty_bigtable select /*+mapjoin(b)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@smb_join_results_empty_bigtable POSTHOOK: Lineage: smb_bucket4_2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -66,14 +66,14 @@ PREHOOK: query: insert overwrite table smb_join_results_empty_bigtable select /*+mapjoin(b)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Output: default@smb_join_results_empty_bigtable POSTHOOK: query: insert overwrite table smb_join_results_empty_bigtable select /*+mapjoin(b)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@smb_join_results_empty_bigtable POSTHOOK: Lineage: smb_bucket4_2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -88,11 +88,11 @@ PREHOOK: query: select * from smb_join_results_empty_bigtable order by k1, v1, k2, v2 PREHOOK: type: QUERY PREHOOK: Input: default@smb_join_results_empty_bigtable -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-50-43_427_8121040407367650033/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-45-05_497_2942078521176735166/-mr-10000 POSTHOOK: query: select * from smb_join_results_empty_bigtable order by k1, v1, k2, v2 POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_join_results_empty_bigtable -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-50-43_427_8121040407367650033/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-45-05_497_2942078521176735166/-mr-10000 POSTHOOK: Lineage: smb_bucket4_2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: smb_join_results_empty_bigtable.k1 
SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] @@ -692,14 +692,14 @@ PREHOOK: query: insert overwrite table smb_join_results select /*+mapjoin(a)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Output: default@smb_join_results POSTHOOK: query: insert overwrite table smb_join_results select /*+mapjoin(a)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@smb_join_results POSTHOOK: Lineage: smb_bucket4_2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -718,11 +718,11 @@ PREHOOK: query: select * from smb_join_results order by k1, v1, k2, v2 PREHOOK: type: QUERY PREHOOK: Input: default@smb_join_results -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-50-54_358_7035919660030544601/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-45-12_973_1372102537783813819/-mr-10000 POSTHOOK: query: select * from smb_join_results order by k1, v1, k2, v2 POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_join_results -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-50-54_358_7035919660030544601/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-45-12_973_1372102537783813819/-mr-10000 POSTHOOK: Lineage: smb_bucket4_2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: smb_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] @@ -1239,13 +1239,13 @@ NULL NULL 498 val_498 PREHOOK: query: insert overwrite table normal_join_results select * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Output: default@normal_join_results POSTHOOK: query: insert overwrite table normal_join_results select * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Output: default@normal_join_results POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: normal_join_results.k2 SIMPLE [(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] @@ -1268,11 +1268,11 @@ PREHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from normal_join_results PREHOOK: type: QUERY PREHOOK: Input: default@normal_join_results -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-51-07_558_4585413126943846420/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-45-21_393_9150168126997644301/-mr-10000 POSTHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, 
sum(hash(v1)) as v1, sum(hash(v2)) as v2 from normal_join_results POSTHOOK: type: QUERY POSTHOOK: Input: default@normal_join_results -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-51-07_558_4585413126943846420/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-45-21_393_9150168126997644301/-mr-10000 POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: normal_join_results.k2 SIMPLE [(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: normal_join_results.v1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] @@ -1295,11 +1295,11 @@ PREHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results PREHOOK: type: QUERY PREHOOK: Input: default@smb_join_results -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-51-11_602_8426676182344229346/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-45-24_503_4889415074353736512/-mr-10000 POSTHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_join_results -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-51-11_602_8426676182344229346/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-45-24_503_4889415074353736512/-mr-10000 POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: normal_join_results.k2 SIMPLE [(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: normal_join_results.v1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] @@ -1322,11 +1322,11 @@ PREHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results_empty_bigtable PREHOOK: type: QUERY PREHOOK: Input: default@smb_join_results_empty_bigtable -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-51-15_729_4579951655784934773/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-45-27_617_7238754568395999026/-mr-10000 POSTHOOK: query: select sum(hash(k1)) as k1, sum(hash(k2)) as k2, sum(hash(v1)) as v1, sum(hash(v2)) as v2 from smb_join_results_empty_bigtable POSTHOOK: type: QUERY POSTHOOK: Input: default@smb_join_results_empty_bigtable -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-51-15_729_4579951655784934773/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-45-27_617_7238754568395999026/-mr-10000 POSTHOOK: Lineage: normal_join_results.k1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: normal_join_results.k2 SIMPLE [(smb_bucket4_2)b.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: normal_join_results.v1 SIMPLE [(smb_bucket4_1)a.FieldSchema(name:value, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/skewjoin.q.out =================================================================== --- ql/src/test/results/clientpositive/skewjoin.q.out (revision 990244) +++ ql/src/test/results/clientpositive/skewjoin.q.out (working copy) @@ -236,11 +236,11 @@ 
PREHOOK: query: SELECT sum(hash(key)), sum(hash(value)) FROM dest_j1 PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-37-21_000_2671125070772953479/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-35-13_960_7342986927129350816/-mr-10000 POSTHOOK: query: SELECT sum(hash(key)), sum(hash(value)) FROM dest_j1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-37-21_000_2671125070772953479/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-35-13_960_7342986927129350816/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 278697 101852390308 @@ -386,21 +386,21 @@ JOIN T3 c ON b.key = c.key JOIN T4 d ON c.key = d.key PREHOOK: type: QUERY -PREHOOK: Input: default@t4 +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-38-04_518_2033294341400244571/-mr-10000 +PREHOOK: Input: default@t4 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-35-52_022_1767201564777971953/-mr-10000 POSTHOOK: query: SELECT /*+ STREAMTABLE(a) */ * FROM T1 a JOIN T2 b ON a.key = b.key JOIN T3 c ON b.key = c.key JOIN T4 d ON c.key = d.key POSTHOOK: type: QUERY -POSTHOOK: Input: default@t4 +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-38-04_518_2033294341400244571/-mr-10000 +POSTHOOK: Input: default@t4 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-35-52_022_1767201564777971953/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 2 12 2 22 2 12 2 12 @@ -546,21 +546,21 @@ JOIN T3 c ON b.key = c.key JOIN T4 d ON c.key = d.key PREHOOK: type: QUERY -PREHOOK: Input: default@t4 +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-38-10_460_3051517674755082937/-mr-10000 +PREHOOK: Input: default@t4 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-35-57_280_3350316757246908252/-mr-10000 POSTHOOK: query: SELECT /*+ STREAMTABLE(a,c) */ * FROM T1 a JOIN T2 b ON a.key = b.key JOIN T3 c ON b.key = c.key JOIN T4 d ON c.key = d.key POSTHOOK: type: QUERY -POSTHOOK: Input: default@t4 +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-38-10_460_3051517674755082937/-mr-10000 +POSTHOOK: Input: default@t4 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-35-57_280_3350316757246908252/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, 
comment:default), ] 2 12 2 22 2 12 2 12 @@ -650,7 +650,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-38-16_967_5437652130828251791/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-36-02_136_6328495578232032954/-mr-10002 Reduce Output Operator sort order: tag: -1 @@ -695,12 +695,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-38-17_099_1067879277293261019/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-36-02_245_4669838590903193760/-mr-10000 POSTHOOK: query: FROM T1 a JOIN src c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-38-17_099_1067879277293261019/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-36-02_245_4669838590903193760/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 198 6274 194 @@ -888,7 +888,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-38-28_069_4204177388671027934/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-36-08_862_197415476425996951/-mr-10002 Reduce Output Operator sort order: tag: -1 @@ -932,7 +932,7 @@ SELECT sum(hash(Y.key)), sum(hash(Y.value)) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-38-28_276_728470981211329651/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-36-09_103_4871359110375863219/-mr-10000 POSTHOOK: query: FROM (SELECT src.* FROM src) x JOIN @@ -941,7 +941,7 @@ SELECT sum(hash(Y.key)), sum(hash(Y.value)) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-38-28_276_728470981211329651/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-36-09_103_4871359110375863219/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 44481300 101852390308 @@ -1139,7 +1139,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-40-10_039_3701518454064987743/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-37-33_559_1683759911944574309/-mr-10002 Reduce Output Operator sort order: tag: -1 @@ -1183,7 +1183,7 @@ SELECT sum(hash(Y.key)), sum(hash(Y.value)) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-40-10_305_7566971631583481386/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-37-33_809_6355273995807527985/-mr-10000 POSTHOOK: query: FROM (SELECT src.* FROM src) x JOIN @@ -1192,7 +1192,7 @@ SELECT sum(hash(Y.key)), sum(hash(Y.value)) POSTHOOK: type: QUERY POSTHOOK: Input: default@src 
-POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-40-10_305_7566971631583481386/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-37-33_809_6355273995807527985/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] NULL NULL @@ -1470,7 +1470,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-41-13_763_5486455246013984944/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-38-18_954_1325306763981144720/-mr-10002 Reduce Output Operator sort order: tag: -1 @@ -1639,7 +1639,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-41-14_138_857225741486464066/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-38-19_434_8371276815803951732/-mr-10000 POSTHOOK: query: SELECT sum(hash(src1.c1)), sum(hash(src2.c4)) FROM (SELECT src.key as c1, src.value as c2 from src) src1 @@ -1651,7 +1651,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-41-14_138_857225741486464066/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-38-19_434_8371276815803951732/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 293143 -136853010385 @@ -1728,7 +1728,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-42-29_281_5055738049430830069/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-39-13_370_294180111324718792/-mr-10002 Select Operator expressions: expr: _col0 @@ -1788,131 +1788,131 @@ PREHOOK: query: SELECT /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) FROM T1 k LEFT OUTER JOIN T1 v ON k.key+1=v.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-42-29_370_3600297360970542153/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-39-13_481_7679227732704511200/-mr-10000 POSTHOOK: query: SELECT /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) FROM T1 k LEFT OUTER JOIN T1 v ON k.key+1=v.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-42-29_370_3600297360970542153/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-39-13_481_7679227732704511200/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 372 6320 PREHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.val PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-42-36_144_5811855051099731026/-mr-10000 +PREHOOK: 
Output: file:/tmp/jsichi/hive_2010-08-26_16-39-19_425_6467489422191230676/-mr-10000 POSTHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.val POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-42-36_144_5811855051099731026/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-39-19_425_6467489422191230676/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] NULL NULL PREHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-42-44_576_282167472451878689/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-39-25_221_617492689938074682/-mr-10000 POSTHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-42-44_576_282167472451878689/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-39-25_221_617492689938074682/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 429 12643 PREHOOK: query: select sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-42-52_450_3199148070832612430/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-39-31_018_1528279542212500500/-mr-10000 POSTHOOK: query: select sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-42-52_450_3199148070832612430/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-39-31_018_1528279542212500500/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 429 12643 PREHOOK: query: select count(1) from T1 a join T1 b on a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-43-04_335_5121628332368349916/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-39-40_020_8878047536635722237/-mr-10000 POSTHOOK: query: select count(1) from T1 a join T1 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-43-04_335_5121628332368349916/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-39-40_020_8878047536635722237/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: 
Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 8 PREHOOK: query: FROM T1 a LEFT OUTER JOIN T2 c ON c.key+1=a.key SELECT sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-43-16_335_6156730727323887138/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-39-49_090_1421212582715517992/-mr-10000 POSTHOOK: query: FROM T1 a LEFT OUTER JOIN T2 c ON c.key+1=a.key SELECT sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-43-16_335_6156730727323887138/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-39-49_090_1421212582715517992/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 317 9462 50 PREHOOK: query: FROM T1 a RIGHT OUTER JOIN T2 c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-43-25_726_4662996952485642353/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-39-55_254_6680876796938489109/-mr-10000 POSTHOOK: query: FROM T1 a RIGHT OUTER JOIN T2 c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-43-25_726_4662996952485642353/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-39-55_254_6680876796938489109/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 51 1570 318 PREHOOK: query: FROM T1 a FULL OUTER JOIN T2 c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-43-35_816_1937851827863127884/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-01_410_6993928537152626835/-mr-10000 POSTHOOK: query: FROM T1 a FULL OUTER JOIN T2 c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-43-35_816_1937851827863127884/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-01_410_6993928537152626835/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, 
comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 317 9462 318 PREHOOK: query: SELECT sum(hash(src1.key)), sum(hash(src1.val)), sum(hash(src2.key)) FROM T1 src1 LEFT OUTER JOIN T2 src2 ON src1.key+1 = src2.key RIGHT OUTER JOIN T2 src3 ON src2.key = src3.key PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-43-45_577_4148142258509714415/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-07_611_5143289292874656865/-mr-10000 POSTHOOK: query: SELECT sum(hash(src1.key)), sum(hash(src1.val)), sum(hash(src2.key)) FROM T1 src1 LEFT OUTER JOIN T2 src2 ON src1.key+1 = src2.key RIGHT OUTER JOIN T2 src3 ON src2.key = src3.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-43-45_577_4148142258509714415/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-07_611_5143289292874656865/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 370 11003 377 PREHOOK: query: SELECT sum(hash(src1.key)), sum(hash(src1.val)), sum(hash(src2.key)) FROM T1 src1 JOIN T2 src2 ON src1.key+1 = src2.key JOIN T2 src3 ON src2.key = src3.key PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-43-55_040_8769802363049508359/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-14_005_1103261153715368406/-mr-10000 POSTHOOK: query: SELECT sum(hash(src1.key)), sum(hash(src1.val)), sum(hash(src2.key)) FROM T1 src1 JOIN T2 src2 ON src1.key+1 = src2.key JOIN T2 src3 ON src2.key = src3.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-43-55_040_8769802363049508359/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-14_005_1103261153715368406/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 370 11003 377 PREHOOK: query: select /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k left outer join T1 v on k.key+1=v.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-13_748_8725230467902356167/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-27_339_4249543099897472280/-mr-10000 POSTHOOK: query: select /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k left outer join T1 v on k.key+1=v.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-44-13_748_8725230467902356167/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-40-27_339_4249543099897472280/-mr-10000 POSTHOOK: 
Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 372 6320 Index: ql/src/test/results/clientpositive/bucketmapjoin2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin2.q.out (revision 990244) +++ ql/src/test/results/clientpositive/bucketmapjoin2.q.out (working copy) @@ -130,7 +130,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-12_274_1980024742830625697/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-00_664_6737222251329303138/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -141,12 +141,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244652 + transient_lastDdlTime 1282861740 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -204,7 +204,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-12_274_1980024742830625697/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-00_664_6737222251329303138/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -215,12 +215,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244652 + transient_lastDdlTime 1282861740 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -229,15 +229,15 @@ Alias Bucket Base File Name Mapping: b {srcbucket20.txt=[srcbucket22.txt], srcbucket21.txt=[srcbucket23.txt]} Alias Bucket File Name Mapping: - b {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]} + b {pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [a] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -249,12 +249,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244647 + transient_lastDdlTime 1282861736 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -266,12 +266,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244647 + transient_lastDdlTime 1282861736 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -283,14 +283,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-12_274_1980024742830625697/-ext-10002 - destination: 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-12_274_1980024742830625697/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-00_664_6737222251329303138/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-00_664_6737222251329303138/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-12_274_1980024742830625697/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-00_664_6737222251329303138/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -300,24 +300,24 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244652 + transient_lastDdlTime 1282861740 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-12_274_1980024742830625697/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-00_664_6737222251329303138/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-12_274_1980024742830625697/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-00_664_6737222251329303138/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-12_274_1980024742830625697/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-00_664_6737222251329303138/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -328,21 +328,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244652 + transient_lastDdlTime 1282861740 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-12_274_1980024742830625697/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-12_274_1980024742830625697/-ext-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-00_664_6737222251329303138/-ext-10002 [pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-00_664_6737222251329303138/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-12_274_1980024742830625697/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-00_664_6737222251329303138/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -353,12 +353,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244652 + transient_lastDdlTime 1282861740 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -369,12 +369,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244652 + transient_lastDdlTime 1282861740 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -385,16 +385,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(b)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] @@ -402,11 +402,11 @@ PREHOOK: query: 
select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-04-23_244_8040718131761759652/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-29-07_690_2072687980435480607/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-04-23_244_8040718131761759652/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-29-07_690_2072687980435480607/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part_2)b.FieldSchema(name:value, type:string, comment:null), ] @@ -432,16 +432,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(b)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -455,11 +455,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-04-40_740_8575724777000826496/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-29-21_542_4741350660030347450/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-04-40_740_8575724777000826496/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-29-21_542_4741350660030347450/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -496,16 +496,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: 
default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-04-47_983_680601809823939628/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-29-27_906_6588668366226639259/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-04-47_983_680601809823939628/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-29-27_906_6588668366226639259/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -603,7 +603,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-52_656_5265784663909250593/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-31_208_6758488839295751961/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -614,12 +614,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244680 + transient_lastDdlTime 1282861761 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -667,7 +667,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-52_656_5265784663909250593/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-31_208_6758488839295751961/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -678,12 +678,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244680 + transient_lastDdlTime 1282861761 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -692,15 +692,15 @@ Alias Bucket Base File Name Mapping: a {srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -714,13 +714,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244650 + transient_lastDdlTime 1282861739 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -732,13 +732,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244650 + transient_lastDdlTime 1282861739 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part_2 name: srcbucket_mapjoin_part_2 @@ -750,14 +750,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-52_656_5265784663909250593/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-52_656_5265784663909250593/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-31_208_6758488839295751961/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-31_208_6758488839295751961/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-52_656_5265784663909250593/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-31_208_6758488839295751961/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -767,24 +767,24 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244680 + transient_lastDdlTime 1282861761 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-52_656_5265784663909250593/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-31_208_6758488839295751961/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-52_656_5265784663909250593/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-31_208_6758488839295751961/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-52_656_5265784663909250593/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-31_208_6758488839295751961/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -795,21 +795,21 @@ columns.types string:string:string file.inputformat 
org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244680 + transient_lastDdlTime 1282861761 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-52_656_5265784663909250593/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-52_656_5265784663909250593/-ext-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-31_208_6758488839295751961/-ext-10002 [pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-31_208_6758488839295751961/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-04-52_656_5265784663909250593/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-29-31_208_6758488839295751961/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -820,12 +820,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244680 + transient_lastDdlTime 1282861761 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -836,12 +836,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244680 + transient_lastDdlTime 1282861761 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -852,16 +852,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin 
PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -881,11 +881,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-05-03_742_730479191368201266/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-29-38_202_3729411326545213662/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-05-03_742_730479191368201266/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-29-38_202_3729411326545213662/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -935,16 +935,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b on a.key=b.key and b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] @@ -970,11 +970,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-05-21_674_7687888788603521153/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-29-52_089_4107376078253101346/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: 
default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-05-21_674_7687888788603521153/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-29-52_089_4107376078253101346/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -1035,16 +1035,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-05-29_088_1807289787318200894/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-29-58_456_5874614412150965390/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-05-29_088_1807289787318200894/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-29-58_456_5874614412150965390/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/cp_mj_rc.q.out =================================================================== --- ql/src/test/results/clientpositive/cp_mj_rc.q.out (revision 990244) +++ ql/src/test/results/clientpositive/cp_mj_rc.q.out (working copy) @@ -46,14 +46,14 @@ POSTHOOK: Lineage: src_two_columns.v1 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: SELECT /*+ MAPJOIN(six) */ six.*, two.k1 from src_six_columns six join src_two_columns two on (six.k3=two.k1) PREHOOK: type: QUERY +PREHOOK: Input: default@src_six_columns PREHOOK: Input: default@src_two_columns -PREHOOK: Input: default@src_six_columns -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-18-15_717_1117205058531623654/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-36-20_108_4263364980066320954/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(six) */ six.*, two.k1 from src_six_columns six join src_two_columns two on (six.k3=two.k1) POSTHOOK: type: QUERY +POSTHOOK: Input: default@src_six_columns POSTHOOK: Input: default@src_two_columns -POSTHOOK: Input: default@src_six_columns -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-18-15_717_1117205058531623654/10000 +POSTHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-26_15-36-20_108_4263364980066320954/-mr-10000 POSTHOOK: Lineage: src_six_columns.k1 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: src_six_columns.k2 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: src_six_columns.k3 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -64,14 +64,14 @@ POSTHOOK: Lineage: src_two_columns.v1 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: SELECT /*+ MAPJOIN(two) */ two.*, six.k3 from src_six_columns six join src_two_columns two on (six.k3=two.k1) PREHOOK: type: QUERY +PREHOOK: Input: default@src_six_columns PREHOOK: Input: default@src_two_columns -PREHOOK: Input: default@src_six_columns -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-18-18_496_6407982316158329228/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-36-23_213_3720062508928690752/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(two) */ two.*, six.k3 from src_six_columns six join src_two_columns two on (six.k3=two.k1) POSTHOOK: type: QUERY +POSTHOOK: Input: default@src_six_columns POSTHOOK: Input: default@src_two_columns -POSTHOOK: Input: default@src_six_columns -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-18-18_496_6407982316158329228/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-36-23_213_3720062508928690752/-mr-10000 POSTHOOK: Lineage: src_six_columns.k1 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: src_six_columns.k2 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: src_six_columns.k3 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join_map_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/join_map_ppr.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join_map_ppr.q.out (working copy) @@ -84,7 +84,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-30_478_3680463550283866445/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-23_861_1154531129947007530/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -95,12 +95,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107930 + transient_lastDdlTime 1282863743 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -163,7 +163,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-30_478_3680463550283866445/-ext-10002 + directory: 
pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-23_861_1154531129947007530/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -174,12 +174,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107930 + transient_lastDdlTime 1282863743 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -232,7 +232,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-30_478_3680463550283866445/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-23_861_1154531129947007530/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -243,21 +243,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107930 + transient_lastDdlTime 1282863743 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -271,13 +271,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107918 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -288,13 +288,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107918 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -306,14 +306,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-30_478_3680463550283866445/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-30_478_3680463550283866445/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-23_861_1154531129947007530/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-23_861_1154531129947007530/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-30_478_3680463550283866445/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-23_861_1154531129947007530/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -323,24 +323,24 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107930 + transient_lastDdlTime 1282863743 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-30_478_3680463550283866445/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-23_861_1154531129947007530/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-30_478_3680463550283866445/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-23_861_1154531129947007530/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-30_478_3680463550283866445/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-23_861_1154531129947007530/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -351,21 +351,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107930 + transient_lastDdlTime 1282863743 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-30_478_3680463550283866445/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-30_478_3680463550283866445/-ext-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-23_861_1154531129947007530/-ext-10002 [pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-23_861_1154531129947007530/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-30_478_3680463550283866445/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-23_861_1154531129947007530/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -376,12 +376,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107930 + transient_lastDdlTime 1282863743 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -392,12 +392,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107930 + transient_lastDdlTime 1282863743 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -409,9 +409,9 @@ JOIN srcpart z ON (x.key = z.key) WHERE z.ds='2008-04-08' and z.hr=11 PREHOOK: type: QUERY -PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@src PREHOOK: Input: default@src1 +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Output: default@dest_j1 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value @@ -419,9 +419,9 @@ JOIN srcpart z ON (x.key = z.key) WHERE z.ds='2008-04-08' and z.hr=11 POSTHOOK: type: QUERY -POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 +POSTHOOK: Input: 
default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Output: default@dest_j1 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] @@ -429,11 +429,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-05-36_854_44548465210414790/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-02-28_814_1337540899332242986/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-05-36_854_44548465210414790/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-02-28_814_1337540899332242986/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ] @@ -676,7 +676,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-47_771_3043845519123603817/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-38_736_8579991155336776655/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -687,12 +687,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107936 + transient_lastDdlTime 1282863748 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -755,7 +755,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-47_771_3043845519123603817/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-38_736_8579991155336776655/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -766,12 +766,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107936 + transient_lastDdlTime 1282863748 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -824,7 +824,7 @@ File Output Operator compressed: false 
GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-47_771_3043845519123603817/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-38_736_8579991155336776655/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -835,21 +835,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107936 + transient_lastDdlTime 1282863748 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -863,13 +863,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107918 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -880,13 +880,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107918 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -898,14 +898,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-47_771_3043845519123603817/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-47_771_3043845519123603817/-ext-10000 + source: 
pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-38_736_8579991155336776655/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-38_736_8579991155336776655/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-47_771_3043845519123603817/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-38_736_8579991155336776655/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -915,24 +915,24 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107936 + transient_lastDdlTime 1282863748 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-47_771_3043845519123603817/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-38_736_8579991155336776655/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-47_771_3043845519123603817/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-38_736_8579991155336776655/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-47_771_3043845519123603817/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-38_736_8579991155336776655/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -943,21 +943,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107936 + transient_lastDdlTime 1282863748 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-47_771_3043845519123603817/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-47_771_3043845519123603817/-ext-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-38_736_8579991155336776655/-ext-10002 
[pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-38_736_8579991155336776655/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-05-47_771_3043845519123603817/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-02-38_736_8579991155336776655/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -968,12 +968,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107936 + transient_lastDdlTime 1282863748 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -984,12 +984,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282107936 + transient_lastDdlTime 1282863748 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -1001,9 +1001,9 @@ JOIN srcpart z ON (x.key = z.key) WHERE z.ds='2008-04-08' and z.hr=11 PREHOOK: type: QUERY +PREHOOK: Input: default@src1_copy +PREHOOK: Input: default@src_copy PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -PREHOOK: Input: default@src_copy -PREHOOK: Input: default@src1_copy PREHOOK: Output: default@dest_j1 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value @@ -1011,9 +1011,9 @@ JOIN srcpart z ON (x.key = z.key) WHERE z.ds='2008-04-08' and z.hr=11 POSTHOOK: type: QUERY +POSTHOOK: Input: default@src1_copy +POSTHOOK: Input: default@src_copy POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -POSTHOOK: Input: default@src_copy -POSTHOOK: Input: default@src1_copy POSTHOOK: Output: default@dest_j1 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1_copy)x.FieldSchema(name:key, type:string, comment:null), ] @@ -1028,11 +1028,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-05-53_876_2301865258012081792/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-02-43_530_7872552317980540717/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-05-53_876_2301865258012081792/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-02-43_530_7872552317980540717/-mr-10000 POSTHOOK: 
Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1_copy)x.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/smb_mapjoin_2.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_2.q.out (revision 990244) +++ ql/src/test/results/clientpositive/smb_mapjoin_2.q.out (working copy) @@ -96,14 +96,14 @@ PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-02_694_3636391872212461439/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-08_051_1517169915526953562/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-02_694_3636391872212461439/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-08_051_1517169915526953562/-mr-10000 4 val_4 4 val_4 10 val_10 10 val_10 PREHOOK: query: explain @@ -174,14 +174,14 @@ PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a left outer join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-07_018_7301752522071852777/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-11_406_8228057689821948556/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a left outer join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-07_018_7301752522071852777/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-11_406_8228057689821948556/-mr-10000 1 val_1 NULL NULL 3 val_3 NULL NULL 4 val_4 4 val_4 @@ -255,14 +255,14 @@ PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a right outer join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-11_316_7090256510661284902/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-14_848_6924756896360481597/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a right outer join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-11_316_7090256510661284902/-mr-10000 
+POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-14_848_6924756896360481597/-mr-10000 4 val_4 4 val_4 10 val_10 10 val_10 NULL NULL 17 val_17 @@ -337,14 +337,14 @@ PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a full outer join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-15_627_5055331353742537985/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-18_391_399582856608731174/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_1 a full outer join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-15_627_5055331353742537985/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-18_391_399582856608731174/-mr-10000 1 val_1 NULL NULL 3 val_3 NULL NULL 4 val_4 4 val_4 @@ -422,14 +422,14 @@ PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-20_888_3104242973720820092/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-21_776_1636141058720440205/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-20_888_3104242973720820092/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-21_776_1636141058720440205/-mr-10000 4 val_4 4 val_4 10 val_10 10 val_10 PREHOOK: query: explain @@ -500,14 +500,14 @@ PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a left outer join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-25_305_6648820221031980381/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-25_168_549585829862028089/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a left outer join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-25_305_6648820221031980381/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-25_168_549585829862028089/-mr-10000 1 val_1 NULL NULL 3 val_3 NULL NULL 4 val_4 4 val_4 @@ -581,14 +581,14 @@ PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a right outer join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: 
file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-29_574_4161973268171742510/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-28_502_3489077955725492690/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a right outer join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-29_574_4161973268171742510/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-28_502_3489077955725492690/-mr-10000 4 val_4 4 val_4 10 val_10 10 val_10 NULL NULL 17 val_17 @@ -663,14 +663,14 @@ PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a full outer join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-33_942_7132087148038788477/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-31_892_5703704492172255426/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_1 a full outer join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-33_942_7132087148038788477/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-31_892_5703704492172255426/-mr-10000 1 val_1 NULL NULL 3 val_3 NULL NULL 4 val_4 4 val_4 Index: ql/src/test/results/clientpositive/ppd_multi_insert.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_multi_insert.q.out (revision 990244) +++ ql/src/test/results/clientpositive/ppd_multi_insert.q.out (working copy) @@ -216,10 +216,10 @@ INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300 PREHOOK: type: QUERY PREHOOK: Input: default@src +PREHOOK: Output: ../build/ql/test/data/warehouse/mi4.out PREHOOK: Output: default@mi1 PREHOOK: Output: default@mi2 PREHOOK: Output: default@mi3@ds=2008-04-08/hr=12 -PREHOOK: Output: ../build/ql/test/data/warehouse/mi4.out POSTHOOK: query: FROM src a JOIN src b ON (a.key = b.key) INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100 INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200 @@ -227,10 +227,10 @@ INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300 POSTHOOK: type: QUERY POSTHOOK: Input: default@src +POSTHOOK: Output: ../build/ql/test/data/warehouse/mi4.out POSTHOOK: Output: default@mi1 POSTHOOK: Output: default@mi2 POSTHOOK: Output: default@mi3@ds=2008-04-08/hr=12 -POSTHOOK: Output: ../build/ql/test/data/warehouse/mi4.out POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: mi2.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -239,11 +239,11 @@ PREHOOK: query: SELECT mi1.* FROM mi1 PREHOOK: type: QUERY PREHOOK: Input: default@mi1 -PREHOOK: Output: 
file:/tmp/jssarma/hive_2010-07-21_11-46-54_953_7860817338533878265/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-24-32_780_6038103351737463067/-mr-10000 POSTHOOK: query: SELECT mi1.* FROM mi1 POSTHOOK: type: QUERY POSTHOOK: Input: default@mi1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-46-54_953_7860817338533878265/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-24-32_780_6038103351737463067/-mr-10000 POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: mi2.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -400,11 +400,11 @@ PREHOOK: query: SELECT mi2.* FROM mi2 PREHOOK: type: QUERY PREHOOK: Input: default@mi2 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-46-55_006_6229034219349396892/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-24-33_072_1447950994639415029/-mr-10000 POSTHOOK: query: SELECT mi2.* FROM mi2 POSTHOOK: type: QUERY POSTHOOK: Input: default@mi2 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-46-55_006_6229034219349396892/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-24-33_072_1447950994639415029/-mr-10000 POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: mi2.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -624,11 +624,11 @@ PREHOOK: query: SELECT mi3.* FROM mi3 PREHOOK: type: QUERY PREHOOK: Input: default@mi3@ds=2008-04-08/hr=12 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-46-55_057_3846144917217046584/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-24-33_365_8344416518806586394/-mr-10000 POSTHOOK: query: SELECT mi3.* FROM mi3 POSTHOOK: type: QUERY POSTHOOK: Input: default@mi3@ds=2008-04-08/hr=12 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_11-46-55_057_3846144917217046584/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-24-33_365_8344416518806586394/-mr-10000 POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: mi2.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/semijoin.q.out =================================================================== --- ql/src/test/results/clientpositive/semijoin.q.out (revision 990244) +++ ql/src/test/results/clientpositive/semijoin.q.out (working copy) @@ -8,11 +8,11 @@ PREHOOK: query: select * from t1 sort by key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-31-56_945_3977812479935651025/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-30-50_167_4710362643764304912/-mr-10000 POSTHOOK: query: select * from t1 sort by key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-31-56_945_3977812479935651025/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-30-50_167_4710362643764304912/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -34,11 +34,11 @@ PREHOOK: query: select * 
from t2 sort by key PREHOOK: type: QUERY PREHOOK: Input: default@t2 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-07_980_8279539228492626989/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-30-56_490_1305333221881884322/-mr-10000 POSTHOOK: query: select * from t2 sort by key POSTHOOK: type: QUERY POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-07_980_8279539228492626989/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-30-56_490_1305333221881884322/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -52,21 +52,21 @@ 20 val_10 PREHOOK: query: create table t3 as select * from (select * from t1 union all select * from t2) b PREHOOK: type: CREATETABLE +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 POSTHOOK: query: create table t3 as select * from (select * from t1 union all select * from t2) b POSTHOOK: type: CREATETABLE +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 POSTHOOK: Output: default@t3 PREHOOK: query: select * from t3 sort by key, value PREHOOK: type: QUERY PREHOOK: Input: default@t3 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-20_567_3140026584812908154/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-05_939_5708575653370225816/-mr-10000 POSTHOOK: query: select * from t3 sort by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-20_567_3140026584812908154/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-05_939_5708575653370225816/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -97,11 +97,11 @@ PREHOOK: query: select * from t4 PREHOOK: type: QUERY PREHOOK: Input: default@t4 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-25_015_2952548280591215309/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-09_089_5243149004986287422/-mr-10000 POSTHOOK: query: select * from t4 POSTHOOK: type: QUERY POSTHOOK: Input: default@t4 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-25_015_2952548280591215309/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-09_089_5243149004986287422/-mr-10000 PREHOOK: query: explain select * from t1 a left semi join t2 b on a.key=b.key sort by a.key, a.value PREHOOK: type: QUERY POSTHOOK: query: explain select * from t1 a left semi join t2 b on a.key=b.key sort by a.key, a.value @@ -185,7 +185,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-25_103_1392993488798067716/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-31-09_221_5886345330918235176/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -215,14 +215,14 @@ PREHOOK: query: select * from t1 a left semi join t2 b on a.key=b.key sort by a.key, a.value PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-25_507_672234473019697329/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-09_324_7400916651438089839/-mr-10000 POSTHOOK: query: select * from 
t1 a left semi join t2 b on a.key=b.key sort by a.key, a.value POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-25_507_672234473019697329/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-09_324_7400916651438089839/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -312,7 +312,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-33_848_7018221642451774467/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-31-15_398_6184531515584998832/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -344,12 +344,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-34_004_7788048860411280876/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-15_505_8084865171579203366/-mr-10000 POSTHOOK: query: select * from t2 a left semi join t1 b on b.key=a.key sort by a.key, a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-34_004_7788048860411280876/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-15_505_8084865171579203366/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -441,7 +441,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-42_102_7156671426907165640/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-31-21_590_8928586964867939357/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -471,14 +471,14 @@ PREHOOK: query: select * from t1 a left semi join t4 b on b.key=a.key sort by a.key, a.value PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t4 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-42_218_8723575608296194278/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-21_694_7949704596850731671/-mr-10000 POSTHOOK: query: select * from t1 a left semi join t4 b on b.key=a.key sort by a.key, a.value POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t4 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-42_218_8723575608296194278/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-21_694_7949704596850731671/-mr-10000 PREHOOK: query: explain select a.value from t1 a left semi join t3 b on (b.key = a.key and b.key < '15') sort by a.value PREHOOK: type: QUERY POSTHOOK: query: explain select a.value from t1 a left semi join t3 b on (b.key = a.key and b.key < '15') sort by a.value @@ -568,7 +568,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-51_096_4623214549063353202/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-31-27_718_982050412756211456/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -594,14 +594,14 @@ PREHOOK: query: select a.value from t1 a left semi join t3 b on (b.key = a.key and b.key < '15') sort by a.value PREHOOK: type: QUERY +PREHOOK: Input: default@t1 
PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-51_185_4682423418266607663/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-27_822_1350911842616975350/-mr-10000 POSTHOOK: query: select a.value from t1 a left semi join t3 b on (b.key = a.key and b.key < '15') sort by a.value POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-32-51_185_4682423418266607663/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-27_822_1350911842616975350/-mr-10000 val_0 val_0 val_0 @@ -708,7 +708,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-00_658_3638135161738749845/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-31-33_923_9214921681958097012/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -738,14 +738,14 @@ PREHOOK: query: select * from t1 a left semi join t2 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-00_763_6634156825342170288/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-34_031_5185180184218172844/-mr-10000 POSTHOOK: query: select * from t1 a left semi join t2 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-00_763_6634156825342170288/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-34_031_5185180184218172844/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -841,7 +841,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-10_417_7578367697020753062/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-31-40_153_883351923688146736/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -869,12 +869,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-10_537_4948371486177162830/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-40_268_6201543529188681953/-mr-10000 POSTHOOK: query: select a.value from t1 a left semi join (select key from t3 where key > 5) b on a.key = b.key sort by a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-10_537_4948371486177162830/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-40_268_6201543529188681953/-mr-10000 val_10 val_8 val_9 @@ -980,7 +980,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-20_289_4234953905040662046/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-31-46_312_3993050304389236152/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -1006,14 
+1006,14 @@ PREHOOK: query: select a.value from t1 a left semi join (select key , value from t2 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-20_404_9054226561644712354/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-46_425_3381490323707009880/-mr-10000 POSTHOOK: query: select a.value from t1 a left semi join (select key , value from t2 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-20_404_9054226561644712354/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-46_425_3381490323707009880/-mr-10000 PREHOOK: query: explain select * from t2 a left semi join (select key , value from t1 where key > 2) b on a.key = b.key sort by a.key, a.value PREHOOK: type: QUERY POSTHOOK: query: explain select * from t2 a left semi join (select key , value from t1 where key > 2) b on a.key = b.key sort by a.key, a.value @@ -1110,7 +1110,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-29_819_4830332805438806811/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-31-52_549_6755746237152848554/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -1142,12 +1142,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-29_962_4637203536100177562/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-52_659_5464756953472677859/-mr-10000 POSTHOOK: query: select * from t2 a left semi join (select key , value from t1 where key > 2) b on a.key = b.key sort by a.key, a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-29_962_4637203536100177562/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-52_659_5464756953472677859/-mr-10000 4 val_2 8 val_4 10 val_5 @@ -1234,7 +1234,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-39_814_3237757741903688182/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-31-58_752_2377854145526588776/-mr-10002 Select Operator expressions: expr: _col0 @@ -1272,12 +1272,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-39_949_3832492951103179833/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-31-58_854_7200265512841921451/-mr-10000 POSTHOOK: query: select /*+ mapjoin(b) */ a.key from t3 a left semi join t1 b on a.key = b.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-39_949_3832492951103179833/-mr-10000 +POSTHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-26_16-31-58_854_7200265512841921451/-mr-10000 0 0 0 @@ -1380,7 +1380,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-47_334_8659909258995059282/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-32-04_643_2338407986226403980/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -1410,14 +1410,14 @@ PREHOOK: query: select * from t1 a left semi join t2 b on a.key = 2*b.key sort by a.key, a.value PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-47_442_5393165062196597981/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-04_744_623541634907516161/-mr-10000 POSTHOOK: query: select * from t1 a left semi join t2 b on a.key = 2*b.key sort by a.key, a.value POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-33-47_442_5393165062196597981/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-04_744_623541634907516161/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -1528,7 +1528,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-00_532_862914177595416183/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-32-10_767_3479460229051595720/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -1562,16 +1562,16 @@ PREHOOK: query: select * from t1 a join t2 b on a.key = b.key left semi join t3 c on b.key = c.key sort by a.key, a.value PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-00_669_2146718815299380705/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-10_884_4563457848546641930/-mr-10000 POSTHOOK: query: select * from t1 a join t2 b on a.key = b.key left semi join t3 c on b.key = c.key sort by a.key, a.value POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-00_669_2146718815299380705/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-10_884_4563457848546641930/-mr-10000 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 @@ -1681,7 +1681,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-10_154_8713164422099440585/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-32-18_210_7397281763659818293/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -1713,12 +1713,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-10_285_5690346901938522778/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-18_313_6529433048035998250/-mr-10000 POSTHOOK: query: select * from t3 a left semi join t1 b on a.key = b.key and a.value=b.value sort by a.key, a.value POSTHOOK: type: QUERY 
POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-10_285_5690346901938522778/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-18_313_6529433048035998250/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -1859,7 +1859,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-19_632_5616980801714535288/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-32-24_354_7818817395350959657/-mr-10002 Select Operator expressions: expr: _col0 @@ -1898,13 +1898,13 @@ PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-19_800_5503128702886670226/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-24_474_1477633509986082418/-mr-10000 POSTHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3 a left semi join t1 b on a.key = b.key left semi join t2 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-19_800_5503128702886670226/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-24_474_1477633509986082418/-mr-10000 0 0 0 @@ -2012,7 +2012,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-27_599_2523062859748071251/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-32-30_284_6051624992600368837/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -2041,13 +2041,13 @@ PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-27_734_280601286647555771/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-30_400_7335870280927099010/-mr-10000 POSTHOOK: query: select a.key from t3 a left outer join t1 b on a.key = b.key left semi join t2 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-27_734_280601286647555771/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-30_400_7335870280927099010/-mr-10000 0 0 0 @@ -2167,7 +2167,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-38_081_8628736863899363243/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-32-37_758_3950677286047860567/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -2193,16 +2193,16 @@ PREHOOK: query: select a.key from t1 a right outer join t3 b on a.key = b.key left semi join t2 c on b.key = c.key sort by a.key PREHOOK: type: QUERY +PREHOOK: Input: default@t1 +PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-38_206_6481133578520858234/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-37_871_2454366781942838266/-mr-10000 POSTHOOK: query: select 
a.key from t1 a right outer join t3 b on a.key = b.key left semi join t2 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 +POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-38_206_6481133578520858234/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-37_871_2454366781942838266/-mr-10000 NULL NULL NULL @@ -2325,7 +2325,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-47_226_8207416263682466389/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-32-45_262_5992955238945717538/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -2351,16 +2351,16 @@ PREHOOK: query: select a.key from t1 a full outer join t3 b on a.key = b.key left semi join t2 c on b.key = c.key sort by a.key PREHOOK: type: QUERY +PREHOOK: Input: default@t1 +PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-47_317_2493886120219911769/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-45_380_6139341223545736201/-mr-10000 POSTHOOK: query: select a.key from t1 a full outer join t3 b on a.key = b.key left semi join t2 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 +POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-47_317_2493886120219911769/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-45_380_6139341223545736201/-mr-10000 NULL NULL NULL @@ -2483,7 +2483,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-58_249_8286755896825534965/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-32-52_808_8193104458011818678/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -2509,16 +2509,16 @@ PREHOOK: query: select a.key from t3 a left semi join t2 b on a.key = b.key left outer join t1 c on a.key = c.key sort by a.key PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-58_371_725595106575971018/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-52_924_3358318124911522745/-mr-10000 POSTHOOK: query: select a.key from t3 a left semi join t2 b on a.key = b.key left outer join t1 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-34-58_371_725595106575971018/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-32-52_924_3358318124911522745/-mr-10000 0 0 0 @@ -2641,7 +2641,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-35-09_782_1244256265233886249/-mr-10002 + 
file:/tmp/jsichi/hive_2010-08-26_16-33-00_334_3947388254240326821/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -2667,16 +2667,16 @@ PREHOOK: query: select a.key from t3 a left semi join t2 b on a.key = b.key right outer join t1 c on a.key = c.key sort by a.key PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-35-09_879_5062879341680309052/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-33-00_450_7944705166831975761/-mr-10000 POSTHOOK: query: select a.key from t3 a left semi join t2 b on a.key = b.key right outer join t1 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-35-09_879_5062879341680309052/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-33-00_450_7944705166831975761/-mr-10000 NULL NULL NULL @@ -2801,7 +2801,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-35-19_402_5459989887173784573/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_16-33-07_801_4655936433608376440/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -2830,13 +2830,13 @@ PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-35-19_511_7936884878847745536/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-33-07_917_7933086183001476835/-mr-10000 POSTHOOK: query: select a.key from t3 a left semi join t1 b on a.key = b.key full outer join t2 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-35-19_511_7936884878847745536/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-33-07_917_7933086183001476835/-mr-10000 NULL NULL NULL @@ -3005,7 +3005,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-35-29_028_3549801595991009687/-mr-10003 + file:/tmp/jsichi/hive_2010-08-26_16-33-15_319_6054233081828350725/-mr-10003 Reduce Output Operator key expressions: expr: _col0 @@ -3031,16 +3031,16 @@ PREHOOK: query: select a.key from t3 a left semi join t2 b on a.key = b.key left outer join t1 c on a.value = c.value sort by a.key PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-35-29_134_5024788535763869568/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-33-15_444_3464952629462504820/-mr-10000 POSTHOOK: query: select a.key from t3 a left semi join t2 b on a.key = b.key left outer join t1 c on a.value = c.value sort by a.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t3 -POSTHOOK: Output: 
file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-35-29_134_5024788535763869568/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-33-15_444_3464952629462504820/-mr-10000 0 0 0 Index: ql/src/test/results/clientpositive/join18.q.out =================================================================== --- ql/src/test/results/clientpositive/join18.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join18.q.out (working copy) @@ -231,9 +231,9 @@ ) b ON (a.key = b.key) PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@src1 -PREHOOK: Input: default@src -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/205671669/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-57-30_453_6304651443448748016/-mr-10000 POSTHOOK: query: SELECT a.key, a.value, b.key, b.value FROM ( @@ -246,9 +246,9 @@ ) b ON (a.key = b.key) POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 -POSTHOOK: Input: default@src -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_2/build/ql/tmp/205671669/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-57-30_453_6304651443448748016/-mr-10000 NULL NULL 7 0 3 NULL NULL 10 1 NULL NULL Index: ql/src/test/results/clientpositive/smb_mapjoin_8.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_8.q.out (revision 990244) +++ ql/src/test/results/clientpositive/smb_mapjoin_8.q.out (working copy) @@ -47,14 +47,14 @@ POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:value, type:string, comment:from deserializer), ] PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-02-34_137_8141051139723931378/10000 +PREHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-23-26_339_3909337999945942045/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-02-34_137_8141051139723931378/10000 +POSTHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-23-26_339_3909337999945942045/-mr-10000 POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:value, type:string, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -67,14 +67,14 @@ NULL NULL 5000 val_125 PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 -PREHOOK: Output: 
file:/tmp/jssarma/hive_2010-07-21_12-02-36_691_9046607359163451591/10000 +PREHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-23-38_317_508151892313324438/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-02-36_691_9046607359163451591/10000 +POSTHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-23-38_317_508151892313324438/-mr-10000 POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:value, type:string, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_2.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -117,14 +117,14 @@ POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:value, type:string, comment:from deserializer), ] PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-02-44_394_7557765393788088271/10000 +PREHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-24-31_952_8374542834959139365/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-02-44_394_7557765393788088271/10000 +POSTHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-24-31_952_8374542834959139365/-mr-10000 POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:value, type:string, comment:from deserializer), ] @@ -139,14 +139,14 @@ NULL NULL 5000 val_125 PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-02-46_918_7232119579754498838/10000 +PREHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-24-43_990_3126143867389068365/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: 
default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-02-46_918_7232119579754498838/10000 +POSTHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-24-43_990_3126143867389068365/-mr-10000 POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:value, type:string, comment:from deserializer), ] @@ -199,14 +199,14 @@ POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:value, type:string, comment:from deserializer), ] PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-02-54_681_4952863565750203661/10000 +PREHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-25-38_661_7409288226979635465/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-02-54_681_4952863565750203661/10000 +POSTHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-25-38_661_7409288226979635465/-mr-10000 POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -223,14 +223,14 @@ NULL NULL 5000 val_125 PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-02-57_249_6696803477494534544/10000 +PREHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-25-50_868_4035898613792109463/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-02-57_249_6696803477494534544/10000 +POSTHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-25-50_868_4035898613792109463/-mr-10000 POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from 
deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -293,14 +293,14 @@ POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:value, type:string, comment:from deserializer), ] PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-03-05_070_1157762852634375607/10000 +PREHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-26-45_411_1957736509270496141/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-03-05_070_1157762852634375607/10000 +POSTHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-26-45_411_1957736509270496141/-mr-10000 POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -322,14 +322,14 @@ NULL NULL 5000 val_125 PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 -PREHOOK: Input: default@smb_bucket4_1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-03-07_631_8246437502603272669/10000 +PREHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-26-57_373_7830701927735743689/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 -POSTHOOK: Input: default@smb_bucket4_1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-03-07_631_8246437502603272669/10000 +POSTHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-26-57_373_7830701927735743689/-mr-10000 POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -436,17 +436,17 @@ PREHOOK: query: select /*+mapjoin(b,c)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key full outer join 
smb_bucket4_3 c on a.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 PREHOOK: Input: default@smb_bucket4_3 -PREHOOK: Input: default@smb_bucket4_1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-03-18_035_8194510363795325400/10000 +PREHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-28-12_376_3195316619132079809/-mr-10000 POSTHOOK: query: select /*+mapjoin(b,c)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key full outer join smb_bucket4_3 c on a.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 POSTHOOK: Input: default@smb_bucket4_3 -POSTHOOK: Input: default@smb_bucket4_1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-03-18_035_8194510363795325400/10000 +POSTHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-28-12_376_3195316619132079809/-mr-10000 POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -578,17 +578,17 @@ PREHOOK: query: select /*+mapjoin(b,c)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key full outer join smb_bucket4_3 c on a.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 PREHOOK: Input: default@smb_bucket4_3 -PREHOOK: Input: default@smb_bucket4_1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-03-28_546_3828350776248819685/10000 +PREHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-29-27_919_6526558082466570828/-mr-10000 POSTHOOK: query: select /*+mapjoin(b,c)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key full outer join smb_bucket4_3 c on a.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 POSTHOOK: Input: default@smb_bucket4_3 -POSTHOOK: Input: default@smb_bucket4_1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-03-28_546_3828350776248819685/10000 +POSTHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-29-27_919_6526558082466570828/-mr-10000 POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -743,17 +743,17 @@ PREHOOK: query: select /*+mapjoin(b,c)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key full outer join smb_bucket4_3 c on a.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 PREHOOK: Input: default@smb_bucket4_3 -PREHOOK: Input: default@smb_bucket4_1 -PREHOOK: Output: 
file:/tmp/jssarma/hive_2010-07-21_12-03-38_980_3157892661865975232/10000 +PREHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-30-43_540_1966536991630908087/-mr-10000 POSTHOOK: query: select /*+mapjoin(b,c)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key full outer join smb_bucket4_3 c on a.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 POSTHOOK: Input: default@smb_bucket4_3 -POSTHOOK: Input: default@smb_bucket4_1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-03-38_980_3157892661865975232/10000 +POSTHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-30-43_540_1966536991630908087/-mr-10000 POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -931,17 +931,17 @@ PREHOOK: query: select /*+mapjoin(b,c)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key full outer join smb_bucket4_3 c on a.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 PREHOOK: Input: default@smb_bucket4_3 -PREHOOK: Input: default@smb_bucket4_1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-03-49_348_5700728228920265228/10000 +PREHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-31-58_568_6938740676741659181/-mr-10000 POSTHOOK: query: select /*+mapjoin(b,c)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key full outer join smb_bucket4_3 c on a.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 POSTHOOK: Input: default@smb_bucket4_3 -POSTHOOK: Input: default@smb_bucket4_1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-03-49_348_5700728228920265228/10000 +POSTHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-31-58_568_6938740676741659181/-mr-10000 POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] @@ -1143,17 +1143,17 @@ PREHOOK: query: select /*+mapjoin(b,c)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key full outer join smb_bucket4_3 c on a.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket4_1 PREHOOK: Input: default@smb_bucket4_2 PREHOOK: Input: default@smb_bucket4_3 -PREHOOK: Input: default@smb_bucket4_1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-03-59_852_8218593296814549763/10000 +PREHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-33-14_722_2351975474311252846/-mr-10000 POSTHOOK: query: select 
/*+mapjoin(b,c)*/ * from smb_bucket4_1 a full outer join smb_bucket4_2 b on a.key = b.key full outer join smb_bucket4_3 c on a.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket4_1 POSTHOOK: Input: default@smb_bucket4_2 POSTHOOK: Input: default@smb_bucket4_3 -POSTHOOK: Input: default@smb_bucket4_1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_12-03-59_852_8218593296814549763/10000 +POSTHOOK: Output: hdfs://localhost.localdomain:47035/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-33-14_722_2351975474311252846/-mr-10000 POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] POSTHOOK: Lineage: smb_bucket4_1.key SIMPLE [(smb_bucket_input)smb_bucket_input.FieldSchema(name:key, type:int, comment:from deserializer), ] Index: ql/src/test/results/clientpositive/mapjoin_subquery.q.out =================================================================== --- ql/src/test/results/clientpositive/mapjoin_subquery.q.out (revision 990244) +++ ql/src/test/results/clientpositive/mapjoin_subquery.q.out (working copy) @@ -206,20 +206,20 @@ FROM src1 x JOIN src y ON (x.key = y.key)) subq JOIN srcpart z ON (subq.key1 = z.key and z.ds='2008-04-08' and z.hr=11) PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@src1 -PREHOOK: Input: default@src PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-15_19-27-29_426_1006789162213353166/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-14-14_212_7052833350565905154/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(z) */ subq.key1, z.value FROM (SELECT /*+ MAPJOIN(x) */ x.key as key1, x.value as value1, y.key as key2, y.value as value2 FROM src1 x JOIN src y ON (x.key = y.key)) subq JOIN srcpart z ON (subq.key1 = z.key and z.ds='2008-04-08' and z.hr=11) POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 -POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-15_19-27-29_426_1006789162213353166/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-14-14_212_7052833350565905154/-mr-10000 238 val_238 238 val_238 311 val_311 @@ -488,7 +488,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-15_19-27-34_660_8681994720023430294/10002 + file:/tmp/jsichi/hive_2010-08-26_16-14-18_425_4598079873799409050/-mr-10002 Select Operator expressions: expr: _col0 @@ -535,10 +535,10 @@ JOIN srcpart z ON (subq.key1 = z.key and z.ds='2008-04-08' and z.hr=11) order by subq.key1 PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@src1 -PREHOOK: Input: default@src PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-15_19-27-35_605_8033797734197412586/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-14-18_581_5217007605308966204/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(z) */ subq.key1, z.value FROM (SELECT /*+ MAPJOIN(x) */ x.key as key1, x.value as value1, y.key as key2, y.value as value2 
@@ -546,10 +546,10 @@ JOIN srcpart z ON (subq.key1 = z.key and z.ds='2008-04-08' and z.hr=11) order by subq.key1 POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 -POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-03-15_19-27-35_605_8033797734197412586/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-14-18_581_5217007605308966204/-mr-10000 128 val_128 128 val_128 128 val_128 Index: ql/src/test/results/clientpositive/join36.q.out =================================================================== --- ql/src/test/results/clientpositive/join36.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join36.q.out (working copy) @@ -164,7 +164,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-09-43_090_1020814150085049302/-ext-10000 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-00-31_187_3818554291798258801/-ext-10000 Stage: Stage-0 Move Operator @@ -179,7 +179,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-09-43_090_1020814150085049302/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-00-31_187_3818554291798258801/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 @@ -194,15 +194,15 @@ SELECT /*+ MAPJOIN(x) */ x.key, x.cnt, y.cnt FROM tmp1 x JOIN tmp2 y ON (x.key = y.key) PREHOOK: type: QUERY +PREHOOK: Input: default@tmp1 PREHOOK: Input: default@tmp2 -PREHOOK: Input: default@tmp1 PREHOOK: Output: default@dest_j1 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(x) */ x.key, x.cnt, y.cnt FROM tmp1 x JOIN tmp2 y ON (x.key = y.key) POSTHOOK: type: QUERY +POSTHOOK: Input: default@tmp1 POSTHOOK: Input: default@tmp2 -POSTHOOK: Input: default@tmp1 POSTHOOK: Output: default@dest_j1 POSTHOOK: Lineage: dest_j1.key SIMPLE [(tmp1)x.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(tmp2)y.FieldSchema(name:cnt, type:int, comment:null), ] @@ -214,11 +214,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-09-48_080_5142795017860273901/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-00-34_763_7222853945015683789/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-09-48_080_5142795017860273901/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-00-34_763_7222853945015683789/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(tmp1)x.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(tmp2)y.FieldSchema(name:cnt, type:int, comment:null), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(tmp1)x.FieldSchema(name:cnt, type:int, comment:null), ] Index: ql/src/test/results/clientpositive/bucketmapjoin3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin3.q.out (revision 990244) +++ ql/src/test/results/clientpositive/bucketmapjoin3.q.out (working copy) @@ -140,7 +140,7 @@ File Output Operator compressed: false 
GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-05-40_493_1154743087451063265/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-09_554_7996001089560004879/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -151,12 +151,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244740 + transient_lastDdlTime 1282861809 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -214,7 +214,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-05-40_493_1154743087451063265/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-09_554_7996001089560004879/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -225,12 +225,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244740 + transient_lastDdlTime 1282861809 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -239,15 +239,15 @@ Alias Bucket Base File Name Mapping: b {srcbucket22.txt=[srcbucket20.txt, srcbucket22.txt], srcbucket23.txt=[srcbucket21.txt, srcbucket23.txt]} Alias Bucket File Name Mapping: - b {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]} + b 
{pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [a] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -261,13 +261,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244738 + transient_lastDdlTime 1282861807 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -279,13 +279,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244738 + transient_lastDdlTime 1282861807 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part_2 name: srcbucket_mapjoin_part_2 @@ -297,14 +297,14 
@@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-05-40_493_1154743087451063265/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-05-40_493_1154743087451063265/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-09_554_7996001089560004879/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-09_554_7996001089560004879/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-05-40_493_1154743087451063265/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-09_554_7996001089560004879/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -314,24 +314,24 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244740 + transient_lastDdlTime 1282861809 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-05-40_493_1154743087451063265/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-09_554_7996001089560004879/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-05-40_493_1154743087451063265/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-09_554_7996001089560004879/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-05-40_493_1154743087451063265/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-09_554_7996001089560004879/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -342,21 +342,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244740 + transient_lastDdlTime 1282861809 serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-05-40_493_1154743087451063265/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-05-40_493_1154743087451063265/-ext-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-09_554_7996001089560004879/-ext-10002 [pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-09_554_7996001089560004879/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-05-40_493_1154743087451063265/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-09_554_7996001089560004879/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -367,12 +367,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244740 + transient_lastDdlTime 1282861809 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -383,12 +383,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244740 + transient_lastDdlTime 1282861809 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -416,11 +416,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-05-51_024_8723648334779793893/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-30-17_987_7809424755525209564/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-05-51_024_8723648334779793893/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-30-17_987_7809424755525209564/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin_part_2)a.FieldSchema(name:value, 
type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ] @@ -469,11 +469,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-06-09_720_6395019060412507719/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-30-31_954_5628788611181333885/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-06-09_720_6395019060412507719/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-30-31_954_5628788611181333885/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -510,16 +510,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-06-18_779_7676217201318798633/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-30-38_351_5147508732271519476/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-06-18_779_7676217201318798633/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-30-38_351_5147508732271519476/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -617,7 +617,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-06-23_507_5465528521487362170/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-41_651_4990138249603747064/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -628,12 +628,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244769 + transient_lastDdlTime 1282861831 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -691,7 +691,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-06-23_507_5465528521487362170/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-41_651_4990138249603747064/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -702,12 +702,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244769 + transient_lastDdlTime 1282861831 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -716,17 +716,17 @@ Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket22.txt], srcbucket21.txt=[srcbucket23.txt], srcbucket22.txt=[srcbucket22.txt], srcbucket23.txt=[srcbucket23.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]} + a {pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], 
pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -740,13 +740,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244736 + transient_lastDdlTime 1282861804 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -758,13 +758,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location 
pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244736 + transient_lastDdlTime 1282861804 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part name: srcbucket_mapjoin_part @@ -776,14 +776,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-06-23_507_5465528521487362170/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-06-23_507_5465528521487362170/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-41_651_4990138249603747064/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-41_651_4990138249603747064/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-06-23_507_5465528521487362170/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-41_651_4990138249603747064/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -793,24 +793,24 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244769 + transient_lastDdlTime 1282861831 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-06-23_507_5465528521487362170/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-41_651_4990138249603747064/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-06-23_507_5465528521487362170/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-41_651_4990138249603747064/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-06-23_507_5465528521487362170/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-41_651_4990138249603747064/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -821,21 +821,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244769 + transient_lastDdlTime 1282861831 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-06-23_507_5465528521487362170/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-06-23_507_5465528521487362170/-ext-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-41_651_4990138249603747064/-ext-10002 [pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-41_651_4990138249603747064/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-06-23_507_5465528521487362170/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-30-41_651_4990138249603747064/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -846,12 +846,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244769 + transient_lastDdlTime 1282861831 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -862,12 +862,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244769 + transient_lastDdlTime 1282861831 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -907,11 +907,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-06-35_071_4727224783422846924/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-30-50_259_6194219508540281037/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: 
type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-06-35_071_4727224783422846924/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-30-50_259_6194219508540281037/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -996,11 +996,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-06-54_525_8861324843329024374/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-31-05_693_4794557309886837554/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-06-54_525_8861324843329024374/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-31-05_693_4794557309886837554/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -1061,16 +1061,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-07-01_940_2107644367519418093/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-31-12_070_2692866740463855531/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-07-01_940_2107644367519418093/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-31-12_070_2692866740463855531/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/bucketmapjoin1.q.out_0.17 
=================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin1.q.out_0.17 (revision 990244) +++ ql/src/test/results/clientpositive/bucketmapjoin1.q.out_0.17 (working copy) @@ -137,7 +137,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-48-53_041_3311429946518248453/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-18-32_753_1427220055110185291/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -148,12 +148,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351733 + transient_lastDdlTime 1282940312 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -213,7 +213,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-48-53_041_3311429946518248453/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-18-32_753_1427220055110185291/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -224,12 +224,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351733 + transient_lastDdlTime 1282940312 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -238,15 +238,15 @@ Alias Bucket Base File Name Mapping: b {srcbucket20.txt=[srcbucket20.txt, srcbucket22.txt], srcbucket21.txt=[srcbucket21.txt, srcbucket23.txt]} Alias Bucket File Name Mapping: - b {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]} + b {pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin [a] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [a] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -258,12 +258,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351728 + transient_lastDdlTime 1282940308 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -275,12 +275,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351728 + transient_lastDdlTime 1282940308 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -292,14 +292,14 @@ Move Operator files: hdfs directory: true - source: 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-48-53_041_3311429946518248453/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-48-53_041_3311429946518248453/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-18-32_753_1427220055110185291/-ext-10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-18-32_753_1427220055110185291/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-48-53_041_3311429946518248453/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-18-32_753_1427220055110185291/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -309,20 +309,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351733 + transient_lastDdlTime 1282940312 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-48-53_041_3311429946518248453/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-18-32_753_1427220055110185291/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-48-53_041_3311429946518248453/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-18-32_753_1427220055110185291/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -338,9 +338,9 @@ type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-48-53_041_3311429946518248453/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-48-53_041_3311429946518248453/-ext-10002] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-18-32_753_1427220055110185291/-ext-10002 [pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-18-32_753_1427220055110185291/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-48-53_041_3311429946518248453/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-18-32_753_1427220055110185291/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -351,12 +351,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351733 + transient_lastDdlTime 1282940312 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -367,12 +367,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351733 + transient_lastDdlTime 1282940312 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -381,7 +381,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-48-53_041_3311429946518248453/-ext-10000 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-18-32_753_1427220055110185291/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -392,12 +392,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351733 + transient_lastDdlTime 1282940312 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -409,16 +409,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(b)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE 
[(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] @@ -426,11 +426,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-49-05_646_118825368172921082/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-18-41_448_423478493834178083/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-49-05_646_118825368172921082/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-18-41_448_423478493834178083/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ] @@ -456,16 +456,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(b)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -479,11 +479,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-49-25_730_803579793199714997/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-18-58_469_3798558653738614828/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-49-25_730_803579793199714997/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-18-58_469_3798558653738614828/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -520,16 
+520,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-49-33_707_8850503758956028234/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-19-05_137_9174965708481957933/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-49-33_707_8850503758956028234/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-19-05_137_9174965708481957933/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -629,7 +629,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-49-39_071_8446810922364580884/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-08_806_3545018008696802629/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -640,12 +640,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351765 + transient_lastDdlTime 1282940338 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -700,7 +700,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-49-39_071_8446810922364580884/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-08_806_3545018008696802629/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -711,12 +711,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351765 + transient_lastDdlTime 1282940338 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -725,17 +725,17 @@ Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt], srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 + 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -749,13 +749,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351728 + transient_lastDdlTime 1282940308 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -767,13 +767,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351728 + transient_lastDdlTime 1282940308 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part name: srcbucket_mapjoin_part @@ -785,14 +785,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-49-39_071_8446810922364580884/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-49-39_071_8446810922364580884/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-08_806_3545018008696802629/-ext-10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-08_806_3545018008696802629/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-49-39_071_8446810922364580884/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-08_806_3545018008696802629/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -802,20 +802,20 @@ 
columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351765 + transient_lastDdlTime 1282940338 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-49-39_071_8446810922364580884/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-08_806_3545018008696802629/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-49-39_071_8446810922364580884/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-08_806_3545018008696802629/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -831,9 +831,9 @@ type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-49-39_071_8446810922364580884/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-49-39_071_8446810922364580884/-ext-10002] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-08_806_3545018008696802629/-ext-10002 [pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-08_806_3545018008696802629/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-49-39_071_8446810922364580884/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-08_806_3545018008696802629/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -844,12 +844,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351765 + transient_lastDdlTime 1282940338 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -860,12 +860,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351765 + transient_lastDdlTime 1282940338 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -874,7 +874,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-20_17-49-39_071_8446810922364580884/-ext-10000 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-19-08_806_3545018008696802629/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -885,12 +885,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282351765 + transient_lastDdlTime 1282940338 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -902,16 +902,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -931,11 +931,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-49-51_507_100074607144948729/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-19-19_260_1730483222544165928/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-49-51_507_100074607144948729/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-19-19_260_1730483222544165928/-mr-10000 POSTHOOK: Lineage: 
bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -985,16 +985,16 @@ from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -PREHOOK: Input: default@srcbucket_mapjoin PREHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result select /*+mapjoin(a)*/ a.key, a.value, b.value from srcbucket_mapjoin a join srcbucket_mapjoin_part b on a.key=b.key where b.ds="2008-04-08" POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08 -POSTHOOK: Input: default@srcbucket_mapjoin POSTHOOK: Output: default@bucketmapjoin_tmp_result POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] @@ -1020,11 +1020,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-50-13_823_1597223111337730499/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-19-36_956_5377515740348568787/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-50-13_823_1597223111337730499/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-19-36_956_5377515740348568787/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -1085,16 +1085,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-50-21_243_3279869282423168068/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-19-43_601_3699579635282471240/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 
-POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-20_17-50-21_243_3279869282423168068/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-19-43_601_3699579635282471240/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/union21.q.out =================================================================== --- ql/src/test/results/clientpositive/union21.q.out (revision 990244) +++ ql/src/test/results/clientpositive/union21.q.out (working copy) @@ -261,9 +261,9 @@ ) union_output GROUP BY key PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@src_thrift -PREHOOK: Input: default@src -PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-50-40_879_1035612367995200636/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-09-13_745_2040382884129754790/-mr-10000 POSTHOOK: query: SELECT key, count(1) FROM ( SELECT '1' as key from src @@ -278,9 +278,9 @@ ) union_output GROUP BY key POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@src_thrift -POSTHOOK: Input: default@src -POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-50-40_879_1035612367995200636/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-09-13_745_2040382884129754790/-mr-10000 NULL 2 0 7 001 2 Index: ql/src/test/results/clientpositive/input13.q.out_0.17 =================================================================== --- ql/src/test/results/clientpositive/input13.q.out_0.17 (revision 990244) +++ ql/src/test/results/clientpositive/input13.q.out_0.17 (working copy) @@ -153,7 +153,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-11-37_525_6254739333945514341/10000 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-42-14_916_4627462906286154354/-ext-10000 Stage: Stage-0 Move Operator @@ -168,7 +168,7 @@ Stage: Stage-5 Map Reduce Alias -> Map Operator Tree: - file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-11-37_525_6254739333945514341/10007 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-42-14_916_4627462906286154354/-ext-10007 Reduce Output Operator sort order: Map-reduce partition columns: @@ -198,7 +198,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-11-37_525_6254739333945514341/10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-42-14_916_4627462906286154354/-ext-10002 Stage: Stage-1 Move Operator @@ -213,7 +213,7 @@ Stage: Stage-8 Map Reduce Alias -> Map Operator Tree: - 
file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-11-37_525_6254739333945514341/10008 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-42-14_916_4627462906286154354/-ext-10008 Reduce Output Operator sort order: Map-reduce partition columns: @@ -243,7 +243,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-11-37_525_6254739333945514341/10004 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-42-14_916_4627462906286154354/-ext-10004 Stage: Stage-2 Move Operator @@ -261,7 +261,7 @@ Stage: Stage-11 Map Reduce Alias -> Map Operator Tree: - file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-11-37_525_6254739333945514341/10009 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-42-14_916_4627462906286154354/-ext-10009 Reduce Output Operator sort order: Map-reduce partition columns: @@ -289,7 +289,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-11-37_525_6254739333945514341/10006 + destination: file:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-42-14_916_4627462906286154354/-ext-10006 Stage: Stage-3 Move Operator @@ -300,7 +300,7 @@ Stage: Stage-14 Map Reduce Alias -> Map Operator Tree: - file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-11-37_525_6254739333945514341/10010 + file:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-42-14_916_4627462906286154354/-ext-10010 Reduce Output Operator sort order: Map-reduce partition columns: @@ -327,10 +327,10 @@ INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300 PREHOOK: type: QUERY PREHOOK: Input: default@src +PREHOOK: Output: ../build/ql/test/data/warehouse/dest4.out PREHOOK: Output: default@dest1 PREHOOK: Output: default@dest2 PREHOOK: Output: default@dest3@ds=2008-04-08/hr=12 -PREHOOK: Output: ../build/ql/test/data/warehouse/dest4.out POSTHOOK: query: FROM src INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100 INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200 @@ -338,10 +338,10 @@ INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300 POSTHOOK: type: QUERY POSTHOOK: Input: default@src +POSTHOOK: Output: ../build/ql/test/data/warehouse/dest4.out POSTHOOK: Output: default@dest1 POSTHOOK: Output: default@dest2 POSTHOOK: Output: default@dest3@ds=2008-04-08/hr=12 -POSTHOOK: Output: ../build/ql/test/data/warehouse/dest4.out POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] @@ -350,11 +350,11 @@ PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-11-44_182_8623658788484294220/10000 +PREHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-27_13-42-19_843_3168947862547762965/-mr-10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-11-44_182_8623658788484294220/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-42-19_843_3168947862547762965/-mr-10000 POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] @@ -447,11 +447,11 @@ PREHOOK: query: SELECT dest2.* FROM dest2 PREHOOK: type: QUERY PREHOOK: Input: default@dest2 -PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-11-44_250_6958627549079394486/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-42-20_356_1457870921078810750/-mr-10000 POSTHOOK: query: SELECT dest2.* FROM dest2 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest2 -POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-11-44_250_6958627549079394486/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-42-20_356_1457870921078810750/-mr-10000 POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] @@ -565,11 +565,11 @@ PREHOOK: query: SELECT dest3.* FROM dest3 PREHOOK: type: QUERY PREHOOK: Input: default@dest3@ds=2008-04-08/hr=12 -PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-11-44_319_42913572748613484/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-42-20_869_5121758791192196339/-mr-10000 POSTHOOK: query: SELECT dest3.* FROM dest3 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest3@ds=2008-04-08/hr=12 -POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-11-44_319_42913572748613484/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-42-20_869_5121758791192196339/-mr-10000 POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/smb_mapjoin_3.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_3.q.out (revision 990244) +++ ql/src/test/results/clientpositive/smb_mapjoin_3.q.out (working copy) @@ -96,14 +96,14 @@ PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_2 a join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-40_508_4960774771566994787/-mr-10000 +PREHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-26_16-41-38_771_3879635553697278070/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_2 a join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-40_508_4960774771566994787/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-38_771_3879635553697278070/-mr-10000 20 val_20 20 val_20 23 val_23 23 val_23 PREHOOK: query: explain @@ -174,14 +174,14 @@ PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_2 a left outer join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-45_277_5700990852273275881/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-42_175_886968397506206019/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_2 a left outer join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-45_277_5700990852273275881/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-42_175_886968397506206019/-mr-10000 20 val_20 20 val_20 23 val_23 23 val_23 25 val_25 NULL NULL @@ -254,14 +254,14 @@ PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_2 a right outer join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-49_340_7691095813207554920/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-45_524_6290794323386946551/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_2 a right outer join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-49_340_7691095813207554920/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-45_524_6290794323386946551/-mr-10000 NULL NULL 4 val_4 NULL NULL 10 val_10 NULL NULL 17 val_17 @@ -336,14 +336,14 @@ PREHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_2 a full outer join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-53_191_8130624523560369131/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-48_918_3166616767640535229/-mr-10000 POSTHOOK: query: select /*+mapjoin(a)*/ * from smb_bucket_2 a full outer join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Output: 
file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-53_191_8130624523560369131/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-48_918_3166616767640535229/-mr-10000 NULL NULL 4 val_4 NULL NULL 10 val_10 NULL NULL 17 val_17 @@ -420,14 +420,14 @@ PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_2 a join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-57_456_6504839502595322531/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-52_312_7305863110671678312/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_2 a join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-45-57_456_6504839502595322531/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-52_312_7305863110671678312/-mr-10000 20 val_20 20 val_20 23 val_23 23 val_23 PREHOOK: query: explain @@ -498,14 +498,14 @@ PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_2 a left outer join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-01_597_2602358804194838555/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-55_715_1121536371179061971/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_2 a left outer join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-01_597_2602358804194838555/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-55_715_1121536371179061971/-mr-10000 20 val_20 20 val_20 23 val_23 23 val_23 25 val_25 NULL NULL @@ -578,14 +578,14 @@ PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_2 a right outer join smb_bucket_3 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-05_884_8956877303331909974/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-59_059_5262374925501535326/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_2 a right outer join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-05_884_8956877303331909974/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-41-59_059_5262374925501535326/-mr-10000 NULL NULL 4 val_4 NULL NULL 10 val_10 NULL NULL 17 val_17 @@ -660,14 +660,14 @@ PREHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_2 a full outer join smb_bucket_3 b on a.key = b.key PREHOOK: 
type: QUERY +PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_2 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-10_056_3171406730646207661/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-02_441_3363280449220696161/-mr-10000 POSTHOOK: query: select /*+mapjoin(b)*/ * from smb_bucket_2 a full outer join smb_bucket_3 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_2 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-10_056_3171406730646207661/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-02_441_3363280449220696161/-mr-10000 NULL NULL 4 val_4 NULL NULL 10 val_10 NULL NULL 17 val_17 Index: ql/src/test/results/clientpositive/bucketmapjoin4.q.out_0.17 =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin4.q.out_0.17 (revision 990244) +++ ql/src/test/results/clientpositive/bucketmapjoin4.q.out_0.17 (working copy) @@ -130,7 +130,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-57-47_019_2977180788242331351/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-22-28_815_2194229907693568005/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -141,12 +141,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426267 + transient_lastDdlTime 1282940548 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -194,7 +194,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-57-47_019_2977180788242331351/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-22-28_815_2194229907693568005/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -205,12 +205,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 
1280426267 + transient_lastDdlTime 1282940548 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -219,15 +219,15 @@ Alias Bucket Base File Name Mapping: b {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - b {pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + b {pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 Needs Tagging: false Path -> Alias: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin [a] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [a] Path -> Partition: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -239,12 +239,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426262 + transient_lastDdlTime 1282940544 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -256,12 +256,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426262 + transient_lastDdlTime 1282940544 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -273,14 +273,14 @@ Move Operator files: hdfs directory: true - source: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-57-47_019_2977180788242331351/-ext-10002 - destination: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-57-47_019_2977180788242331351/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-22-28_815_2194229907693568005/-ext-10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-22-28_815_2194229907693568005/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-57-47_019_2977180788242331351/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-22-28_815_2194229907693568005/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -290,20 +290,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426267 + transient_lastDdlTime 1282940548 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-57-47_019_2977180788242331351/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-22-28_815_2194229907693568005/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-57-47_019_2977180788242331351/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-22-28_815_2194229907693568005/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -319,9 +319,9 @@ type: string Needs Tagging: false Path -> Alias: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-57-47_019_2977180788242331351/-ext-10002 [pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-57-47_019_2977180788242331351/-ext-10002] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-22-28_815_2194229907693568005/-ext-10002 [pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-22-28_815_2194229907693568005/-ext-10002] Path -> Partition: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-57-47_019_2977180788242331351/-ext-10002 + 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-22-28_815_2194229907693568005/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -332,12 +332,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426267 + transient_lastDdlTime 1282940548 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -348,12 +348,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426267 + transient_lastDdlTime 1282940548 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -362,7 +362,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-57-47_019_2977180788242331351/-ext-10000 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-22-28_815_2194229907693568005/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -373,12 +373,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426267 + transient_lastDdlTime 1282940548 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -405,11 +405,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_10-57-57_580_6187260372868339490/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-22-36_487_5530889340709868410/-mr-10000 POSTHOOK: query: select count(1) from 
bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_10-57-57_580_6187260372868339490/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-22-36_487_5530889340709868410/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ] @@ -456,11 +456,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_10-58-16_557_6420322700624235215/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-22-51_020_1463379581902051085/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_10-58-16_557_6420322700624235215/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-22-51_020_1463379581902051085/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -497,16 +497,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_10-58-25_722_324941779236398810/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-22-57_686_7096755867545611891/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_10-58-25_722_324941779236398810/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-22-57_686_7096755867545611891/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION 
[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -594,7 +594,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-58-30_791_4935058221345912606/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-01_358_7828412997606473041/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -605,12 +605,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426296 + transient_lastDdlTime 1282940570 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -658,7 +658,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-58-30_791_4935058221345912606/-ext-10002 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-01_358_7828412997606473041/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -669,12 +669,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426296 + transient_lastDdlTime 1282940570 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -683,15 +683,15 @@ Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], 
pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 Needs Tagging: false Path -> Alias: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin [b] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [b] Path -> Partition: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin + pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -703,12 +703,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426262 + transient_lastDdlTime 1282940544 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -720,12 +720,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426262 + transient_lastDdlTime 1282940544 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -737,14 +737,14 @@ Move Operator files: hdfs directory: true - source: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-58-30_791_4935058221345912606/-ext-10002 - destination: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-58-30_791_4935058221345912606/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-01_358_7828412997606473041/-ext-10002 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-01_358_7828412997606473041/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: 
pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-58-30_791_4935058221345912606/-ext-10000 + source: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-01_358_7828412997606473041/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -754,20 +754,20 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426296 + transient_lastDdlTime 1282940570 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-58-30_791_4935058221345912606/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-01_358_7828412997606473041/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-58-30_791_4935058221345912606/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-01_358_7828412997606473041/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -783,9 +783,9 @@ type: string Needs Tagging: false Path -> Alias: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-58-30_791_4935058221345912606/-ext-10002 [pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-58-30_791_4935058221345912606/-ext-10002] + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-01_358_7828412997606473041/-ext-10002 [pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-01_358_7828412997606473041/-ext-10002] Path -> Partition: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-58-30_791_4935058221345912606/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-01_358_7828412997606473041/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -796,12 +796,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426296 + transient_lastDdlTime 1282940570 serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -812,12 +812,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426296 + transient_lastDdlTime 1282940570 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -826,7 +826,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/scratchdir/hive_2010-07-29_10-58-30_791_4935058221345912606/-ext-10000 + directory: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-23-01_358_7828412997606473041/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -837,12 +837,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/commit-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280426296 + transient_lastDdlTime 1282940570 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -881,11 +881,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_10-58-42_432_1139115314160617397/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-23-08_662_1573157002606906486/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_10-58-42_432_1139115314160617397/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-23-08_662_1573157002606906486/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -968,11 +968,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: 
QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_10-59-05_708_8341603358636911421/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-23-23_132_2052671394021903586/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_10-59-05_708_8341603358636911421/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-23-23_132_2052671394021903586/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -1033,16 +1033,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_10-59-14_819_244737097790979037/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-23-29_702_5070225151054685201/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_10-59-14_819_244737097790979037/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-23-29_702_5070225151054685201/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/uniquejoin.q.out =================================================================== --- ql/src/test/results/clientpositive/uniquejoin.q.out (revision 990244) +++ ql/src/test/results/clientpositive/uniquejoin.q.out (working copy) @@ -31,17 +31,17 @@ PREHOOK: query: FROM UNIQUEJOIN PRESERVE T1 a (a.key), PRESERVE T2 b (b.key), PRESERVE T3 c (c.key) SELECT a.key, b.key, c.key PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-42-37_498_6289754437459240853/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-11-24_069_7764691059022887570/-mr-10000 POSTHOOK: 
query: FROM UNIQUEJOIN PRESERVE T1 a (a.key), PRESERVE T2 b (b.key), PRESERVE T3 c (c.key) SELECT a.key, b.key, c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-42-37_498_6289754437459240853/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-11-24_069_7764691059022887570/-mr-10000 1 NULL NULL 2 2 2 3 3 NULL @@ -56,49 +56,49 @@ PREHOOK: query: FROM UNIQUEJOIN T1 a (a.key), T2 b (b.key), T3 c (c.key) SELECT a.key, b.key, c.key PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-42-40_127_7250085494294549177/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-11-28_719_4365937084792175328/-mr-10000 POSTHOOK: query: FROM UNIQUEJOIN T1 a (a.key), T2 b (b.key), T3 c (c.key) SELECT a.key, b.key, c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-42-40_127_7250085494294549177/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-11-28_719_4365937084792175328/-mr-10000 2 2 2 PREHOOK: query: FROM UNIQUEJOIN T1 a (a.key), T2 b (b.key-1), T3 c (c.key) SELECT a.key, b.key, c.key PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-42-43_825_1567908835853076632/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-11-33_295_8916870494939642635/-mr-10000 POSTHOOK: query: FROM UNIQUEJOIN T1 a (a.key), T2 b (b.key-1), T3 c (c.key) SELECT a.key, b.key, c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-42-43_825_1567908835853076632/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-11-33_295_8916870494939642635/-mr-10000 2 3 2 7 8 7 7 8 7 PREHOOK: query: FROM UNIQUEJOIN PRESERVE T1 a (a.key, a.val), PRESERVE T2 b (b.key, b.val), PRESERVE T3 c (c.key, c.val) SELECT a.key, a.val, b.key, b.val, c.key, c.val PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-42-46_450_7673639263976981471/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-11-38_025_3055236114247084019/-mr-10000 POSTHOOK: query: FROM UNIQUEJOIN PRESERVE T1 a (a.key, a.val), PRESERVE T2 b (b.key, b.val), PRESERVE T3 c (c.key, c.val) SELECT a.key, a.val, b.key, b.val, c.key, c.val POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-42-46_450_7673639263976981471/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-11-38_025_3055236114247084019/-mr-10000 1 11 NULL NULL NULL NULL 2 12 NULL NULL 2 12 NULL NULL 2 22 NULL NULL @@ -113,17 +113,17 @@ PREHOOK: query: FROM UNIQUEJOIN PRESERVE T1 a (a.key), T2 b (b.key), PRESERVE T3 c (c.key) SELECT a.key, b.key, c.key PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: 
file:/tmp/jssarma/hive_2010-07-21_13-42-49_123_8832279231761811264/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-11-42_685_7167574432261426669/-mr-10000 POSTHOOK: query: FROM UNIQUEJOIN PRESERVE T1 a (a.key), T2 b (b.key), PRESERVE T3 c (c.key) SELECT a.key, b.key, c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-42-49_123_8832279231761811264/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-11-42_685_7167574432261426669/-mr-10000 1 NULL NULL 2 2 2 3 3 NULL @@ -137,15 +137,15 @@ PREHOOK: query: FROM UNIQUEJOIN PRESERVE T1 a (a.key), T2 b(b.key) SELECT a.key, b.key PREHOOK: type: QUERY +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-42-51_850_1196382022472758858/10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-11-47_377_9190717715738946261/-mr-10000 POSTHOOK: query: FROM UNIQUEJOIN PRESERVE T1 a (a.key), T2 b(b.key) SELECT a.key, b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-42-51_850_1196382022472758858/10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-11-47_377_9190717715738946261/-mr-10000 1 NULL 2 2 3 3 Index: ql/src/test/results/clientpositive/input13.q.out =================================================================== --- ql/src/test/results/clientpositive/input13.q.out (revision 990244) +++ ql/src/test/results/clientpositive/input13.q.out (working copy) @@ -153,7 +153,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-52-32_332_2389903696189180400/-ext-10000 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-50-17_868_874270772309760488/-ext-10000 Stage: Stage-0 Move Operator @@ -168,7 +168,7 @@ Stage: Stage-5 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-52-32_332_2389903696189180400/-ext-10007 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-50-17_868_874270772309760488/-ext-10007 File Output Operator compressed: false GlobalTableId: 0 @@ -185,7 +185,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-52-32_332_2389903696189180400/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-50-17_868_874270772309760488/-ext-10002 Stage: Stage-1 Move Operator @@ -200,7 +200,7 @@ Stage: Stage-8 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-52-32_332_2389903696189180400/-ext-10008 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-50-17_868_874270772309760488/-ext-10008 File Output Operator compressed: false GlobalTableId: 0 @@ -217,7 +217,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-52-32_332_2389903696189180400/-ext-10004 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-50-17_868_874270772309760488/-ext-10004 Stage: Stage-2 Move 
Operator @@ -235,7 +235,7 @@ Stage: Stage-11 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-52-32_332_2389903696189180400/-ext-10009 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-50-17_868_874270772309760488/-ext-10009 File Output Operator compressed: false GlobalTableId: 0 @@ -252,7 +252,7 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-52-32_332_2389903696189180400/-ext-10006 + destination: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-50-17_868_874270772309760488/-ext-10006 Stage: Stage-3 Move Operator @@ -263,7 +263,7 @@ Stage: Stage-14 Map Reduce Alias -> Map Operator Tree: - file:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-52-32_332_2389903696189180400/-ext-10010 + file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-50-17_868_874270772309760488/-ext-10010 File Output Operator compressed: false GlobalTableId: 0 @@ -279,10 +279,10 @@ INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300 PREHOOK: type: QUERY PREHOOK: Input: default@src +PREHOOK: Output: ../build/ql/test/data/warehouse/dest4.out PREHOOK: Output: default@dest1 PREHOOK: Output: default@dest2 PREHOOK: Output: default@dest3@ds=2008-04-08/hr=12 -PREHOOK: Output: ../build/ql/test/data/warehouse/dest4.out POSTHOOK: query: FROM src INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100 INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200 @@ -290,10 +290,10 @@ INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300 POSTHOOK: type: QUERY POSTHOOK: Input: default@src +POSTHOOK: Output: ../build/ql/test/data/warehouse/dest4.out POSTHOOK: Output: default@dest1 POSTHOOK: Output: default@dest2 POSTHOOK: Output: default@dest3@ds=2008-04-08/hr=12 -POSTHOOK: Output: ../build/ql/test/data/warehouse/dest4.out POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] @@ -302,11 +302,11 @@ PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-52-37_616_364430927880973822/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-50-22_603_8204333485013682573/-mr-10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-52-37_616_364430927880973822/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-50-22_603_8204333485013682573/-mr-10000 POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] @@ -399,11 +399,11 @@ PREHOOK: query: SELECT dest2.* FROM dest2 PREHOOK: type: QUERY PREHOOK: Input: default@dest2 -PREHOOK: Output: 
file:/tmp/nzhang/hive_2010-08-17_22-52-37_867_3685128862804503912/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-50-22_865_5466685897994447699/-mr-10000 POSTHOOK: query: SELECT dest2.* FROM dest2 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest2 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-52-37_867_3685128862804503912/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-50-22_865_5466685897994447699/-mr-10000 POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] @@ -517,11 +517,11 @@ PREHOOK: query: SELECT dest3.* FROM dest3 PREHOOK: type: QUERY PREHOOK: Input: default@dest3@ds=2008-04-08/hr=12 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-52-38_110_1838979259788850926/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-50-23_137_2677954228712032208/-mr-10000 POSTHOOK: query: SELECT dest3.* FROM dest3 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest3@ds=2008-04-08/hr=12 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-52-38_110_1838979259788850926/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-50-23_137_2677954228712032208/-mr-10000 POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join28.q.out_0.17 =================================================================== --- ql/src/test/results/clientpositive/join28.q.out_0.17 (revision 990244) +++ ql/src/test/results/clientpositive/join28.q.out_0.17 (working copy) @@ -218,7 +218,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-30_668_6907132877729345606/-ext-10000 + destination: pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-51-17_782_8830717340632746316/-ext-10000 Stage: Stage-0 Move Operator @@ -233,7 +233,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-18_12-00-30_668_6907132877729345606/-ext-10002 + pfile:/data/users/jsichi/open/commit-trunk/build/ql/scratchdir/hive_2010-08-27_13-51-17_782_8830717340632746316/-ext-10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -264,8 +264,8 @@ FROM src1 x JOIN src y ON (x.key = y.key)) subq JOIN srcpart z ON (subq.key1 = z.key and z.ds='2008-04-08' and z.hr=11) PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@src1 -PREHOOK: Input: default@src PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Output: default@dest_j1 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 @@ -275,8 +275,8 @@ FROM src1 x JOIN src y ON (x.key = y.key)) subq JOIN srcpart z ON (subq.key1 = z.key and z.ds='2008-04-08' and z.hr=11) POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 -POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Output: default@dest_j1 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src1)x.FieldSchema(name:key, 
type:string, comment:default), ] @@ -284,11 +284,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_12-00-36_865_2820753628257767618/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-51-22_731_8886120749922178144/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-18_12-00-36_865_2820753628257767618/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-27_13-51-22_731_8886120749922178144/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ] 128 val_128 Index: ql/src/test/results/clientpositive/outer_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/outer_join_ppr.q.out (revision 990244) +++ ql/src/test/results/clientpositive/outer_join_ppr.q.out (working copy) @@ -73,11 +73,11 @@ type: string Needs Tagging: true Path -> Alias: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src [a] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b] Path -> Partition: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -88,12 +88,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -104,16 +104,16 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src - 
pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -127,13 +127,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -144,17 +144,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -168,13 +168,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -185,13 +185,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -223,7 +223,7 @@ File 
Output Operator compressed: false GlobalTableId: 0 - directory: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-20-49_665_8125482111672965694/-ext-10001 + directory: file:/tmp/jsichi/hive_2010-08-26_16-21-27_620_7830111987780351854/-ext-10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -248,10 +248,10 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Input: default@src -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-20-49_835_7616781418926576667/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-21-27_783_281010121981553022/-mr-10000 POSTHOOK: query: FROM src a FULL OUTER JOIN @@ -260,10 +260,10 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Input: default@src -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-20-49_835_7616781418926576667/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-21-27_783_281010121981553022/-mr-10000 17 val_17 17 val_17 17 val_17 17 val_17 18 val_18 18 val_18 @@ -343,13 +343,13 @@ type: string Needs Tagging: true Path -> Alias: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src [a] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [b] - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [b] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [b] Path -> Partition: - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -360,12 +360,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -376,16 +376,16 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430369 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -399,13 +399,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -416,17 +416,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -440,13 +440,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -457,17 +457,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -481,13 +481,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -498,17 +498,17 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart - pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat @@ -522,13 +522,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -539,13 +539,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/heyongqiang/Documents/workspace/Hive-2/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280430361 + transient_lastDdlTime 1282861471 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -577,7 +577,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-20-56_325_1759848794932471215/-ext-10001 + directory: file:/tmp/jsichi/hive_2010-08-26_16-21-32_388_6011123055029150164/-ext-10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -602,12 +602,12 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08' PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -PREHOOK: Input: default@src -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-20-56_460_3863922570984581855/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-21-32_555_1412573489698439156/-mr-10000 POSTHOOK: query: FROM src a FULL OUTER JOIN @@ -616,12 +616,12 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08' POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -POSTHOOK: Input: default@src -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-20-56_460_3863922570984581855/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-21-32_555_1412573489698439156/-mr-10000 17 val_17 17 val_17 17 val_17 17 val_17 18 val_18 18 val_18 Index: ql/src/test/results/clientpositive/join28.q.out =================================================================== --- ql/src/test/results/clientpositive/join28.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join28.q.out (working copy) @@ -218,7 +218,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-09_842_1589855306338673933/-ext-10000 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-58-47_738_5089187786455782370/-ext-10000 Stage: Stage-0 Move Operator @@ -233,7 +233,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-09_842_1589855306338673933/-ext-10002 + 
pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-58-47_738_5089187786455782370/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 @@ -251,8 +251,8 @@ FROM src1 x JOIN src y ON (x.key = y.key)) subq JOIN srcpart z ON (subq.key1 = z.key and z.ds='2008-04-08' and z.hr=11) PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@src1 -PREHOOK: Input: default@src PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Output: default@dest_j1 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 @@ -262,8 +262,8 @@ FROM src1 x JOIN src y ON (x.key = y.key)) subq JOIN srcpart z ON (subq.key1 = z.key and z.ds='2008-04-08' and z.hr=11) POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 -POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Output: default@dest_j1 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src1)x.FieldSchema(name:key, type:string, comment:default), ] @@ -271,11 +271,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-54-15_157_8736292224753845728/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-58-52_535_7242516801546424528/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-54-15_157_8736292224753845728/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-58-52_535_7242516801546424528/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ] 128 val_128 Index: ql/src/test/results/clientpositive/join14.q.out =================================================================== --- ql/src/test/results/clientpositive/join14.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join14.q.out (working copy) @@ -114,27 +114,27 @@ PREHOOK: query: FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100 INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Input: default@src PREHOOK: Output: default@dest1 POSTHOOK: query: FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100 INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Input: default@src POSTHOOK: Output: default@dest1 POSTHOOK: Lineage: dest1.c1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest1.c2 SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select dest1.* from dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-clean/build/ql/scratchdir/hive_2010-08-10_14-30-51_455_2981410831424631574/-mr-10000 +PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-57-17_999_3604615124362926210/-mr-10000 POSTHOOK: query: select dest1.* from dest1 POSTHOOK: type: 
QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-clean/build/ql/scratchdir/hive_2010-08-10_14-30-51_455_2981410831424631574/-mr-10000 +POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-57-17_999_3604615124362926210/-mr-10000 POSTHOOK: Lineage: dest1.c1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest1.c2 SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] 103 val_103 Index: ql/src/test/results/clientpositive/bucketmapjoin4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin4.q.out (revision 990244) +++ ql/src/test/results/clientpositive/bucketmapjoin4.q.out (working copy) @@ -130,7 +130,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-13_160_9149295086394272923/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-22_510_8047018522141779961/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -141,12 +141,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244833 + transient_lastDdlTime 1282861882 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -194,7 +194,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-13_160_9149295086394272923/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-22_510_8047018522141779961/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -205,12 +205,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244833 + transient_lastDdlTime 1282861882 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -219,15 +219,15 @@ Alias Bucket Base File Name Mapping: b {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - b 
{pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + b {pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin [a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [a] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -239,12 +239,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244828 + transient_lastDdlTime 1282861877 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -256,12 +256,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244828 + transient_lastDdlTime 1282861877 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -273,14 +273,14 @@ Move Operator files: hdfs directory: true - source: 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-13_160_9149295086394272923/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-13_160_9149295086394272923/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-22_510_8047018522141779961/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-22_510_8047018522141779961/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-13_160_9149295086394272923/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-22_510_8047018522141779961/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -290,24 +290,24 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244833 + transient_lastDdlTime 1282861882 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-13_160_9149295086394272923/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-22_510_8047018522141779961/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-13_160_9149295086394272923/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-22_510_8047018522141779961/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-13_160_9149295086394272923/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-22_510_8047018522141779961/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -318,21 +318,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244833 + transient_lastDdlTime 1282861882 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
bucketmapjoin_tmp_result TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-13_160_9149295086394272923/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-13_160_9149295086394272923/-ext-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-22_510_8047018522141779961/-ext-10002 [pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-22_510_8047018522141779961/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-13_160_9149295086394272923/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-22_510_8047018522141779961/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -343,12 +343,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244833 + transient_lastDdlTime 1282861882 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -359,12 +359,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244833 + transient_lastDdlTime 1282861882 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -390,11 +390,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-07-22_177_515941965482963176/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-31-29_484_1206327158732357255/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-07-22_177_515941965482963176/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-31-29_484_1206327158732357255/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: 
bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ] @@ -441,11 +441,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-07-39_540_4420511888816352463/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-31-43_349_1218996968253937500/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-07-39_540_4420511888816352463/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-31-43_349_1218996968253937500/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -482,16 +482,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-07-46_912_1335201447409064835/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-31-49_725_5297129701120453460/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-07-46_912_1335201447409064835/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-31-49_725_5297129701120453460/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -579,7 +579,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-51_525_8871964927511981127/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-53_021_93189871321207259/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -590,12 +590,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244859 + transient_lastDdlTime 1282861903 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -643,7 +643,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-51_525_8871964927511981127/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-53_021_93189871321207259/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -654,12 +654,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244859 + transient_lastDdlTime 1282861903 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -668,15 +668,15 @@ Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin [b] + 
pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [b] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -688,12 +688,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244828 + transient_lastDdlTime 1282861877 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -705,12 +705,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244828 + transient_lastDdlTime 1282861877 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -722,14 +722,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-51_525_8871964927511981127/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-51_525_8871964927511981127/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-53_021_93189871321207259/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-53_021_93189871321207259/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-51_525_8871964927511981127/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-53_021_93189871321207259/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -739,24 +739,24 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244859 + transient_lastDdlTime 1282861903 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-51_525_8871964927511981127/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-53_021_93189871321207259/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-51_525_8871964927511981127/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-53_021_93189871321207259/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-51_525_8871964927511981127/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-53_021_93189871321207259/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -767,21 +767,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244859 + transient_lastDdlTime 1282861903 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-51_525_8871964927511981127/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-51_525_8871964927511981127/-ext-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-53_021_93189871321207259/-ext-10002 [pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-53_021_93189871321207259/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-07-51_525_8871964927511981127/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-31-53_021_93189871321207259/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -792,12 +792,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
transient_lastDdlTime 1282244859 + transient_lastDdlTime 1282861903 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -808,12 +808,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282244859 + transient_lastDdlTime 1282861903 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -851,11 +851,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-08-00_142_5359007511846547721/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-31-59_933_2233538912359490176/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-08-00_142_5359007511846547721/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-31-59_933_2233538912359490176/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -938,11 +938,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-08-16_690_3398770801572818640/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-32-13_787_1620251253710582073/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-08-16_690_3398770801572818640/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-32-13_787_1620251253710582073/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -1003,16 +1003,16 @@ from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key PREHOOK: type: QUERY +PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: 
Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Input: default@bucketmapjoin_hash_result_1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-08-24_530_5584573660114801116/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-32-20_167_2165681320829401159/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Input: default@bucketmapjoin_hash_result_1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-08-24_530_5584573660114801116/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-32-20_167_2165681320829401159/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/union22.q.out =================================================================== --- ql/src/test/results/clientpositive/union22.q.out (revision 990244) +++ ql/src/test/results/clientpositive/union22.q.out (working copy) @@ -118,7 +118,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/heyongqiang/hive_2010-08-10_15-20-19_225_1224246482641447263/-mr-10002 + directory: file:/tmp/jsichi/hive_2010-08-26_17-09-26_366_7362686340086914056/-mr-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -173,7 +173,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/heyongqiang/hive_2010-08-10_15-20-19_225_1224246482641447263/-mr-10002 + directory: file:/tmp/jsichi/hive_2010-08-26_17-09-26_366_7362686340086914056/-mr-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -186,9 +186,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/dst_union22/ds=1 [null-subquery2:subq-subquery2:a] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dst_union22/ds=1 [null-subquery2:subq-subquery2:a] Path -> Partition: - pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/dst_union22/ds=1 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dst_union22/ds=1 Partition base file name: ds=1 input format: org.apache.hadoop.mapred.TextInputFormat @@ -201,13 +201,13 @@ columns.types string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/dst_union22 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dst_union22 name dst_union22 partition_columns ds serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 
1281478813 + transient_lastDdlTime 1282867758 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -218,13 +218,13 @@ columns.types string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/dst_union22 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dst_union22 name dst_union22 partition_columns ds serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1281478813 + transient_lastDdlTime 1282867758 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dst_union22 name: dst_union22 @@ -232,7 +232,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-08-10_15-20-19_225_1224246482641447263/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_17-09-26_366_7362686340086914056/-mr-10002 Select Operator expressions: expr: _col0 @@ -275,7 +275,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/scratchdir/hive_2010-08-10_15-20-19_225_1224246482641447263/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-09-26_366_7362686340086914056/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -286,13 +286,13 @@ columns.types string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/dst_union22 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dst_union22 name dst_union22 partition_columns ds serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1281478813 + transient_lastDdlTime 1282867758 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dst_union22 TotalFiles: 1 @@ -336,7 +336,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/scratchdir/hive_2010-08-10_15-20-19_225_1224246482641447263/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-09-26_366_7362686340086914056/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -347,23 +347,23 @@ columns.types string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/dst_union22 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dst_union22 name dst_union22 partition_columns ds serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1281478813 + transient_lastDdlTime 1282867758 serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dst_union22 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - file:/tmp/heyongqiang/hive_2010-08-10_15-20-19_225_1224246482641447263/-mr-10002 [file:/tmp/heyongqiang/hive_2010-08-10_15-20-19_225_1224246482641447263/-mr-10002] - pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/dst_union22_delta/ds=1 [null-subquery1:subq-subquery1:dst_union22_delta] + file:/tmp/jsichi/hive_2010-08-26_17-09-26_366_7362686340086914056/-mr-10002 [file:/tmp/jsichi/hive_2010-08-26_17-09-26_366_7362686340086914056/-mr-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dst_union22_delta/ds=1 [null-subquery1:subq-subquery1:dst_union22_delta] Path -> Partition: - file:/tmp/heyongqiang/hive_2010-08-10_15-20-19_225_1224246482641447263/-mr-10002 + file:/tmp/jsichi/hive_2010-08-26_17-09-26_366_7362686340086914056/-mr-10002 Partition base file name: -mr-10002 input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -379,7 +379,7 @@ columns _col0,_col1,_col10,_col11 columns.types string,string,string,string escape.delim \ - pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/dst_union22_delta/ds=1 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dst_union22_delta/ds=1 Partition base file name: ds=1 input format: org.apache.hadoop.mapred.TextInputFormat @@ -392,13 +392,13 @@ columns.types string:string:string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/dst_union22_delta + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dst_union22_delta name dst_union22_delta partition_columns ds serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1281478813 + transient_lastDdlTime 1282867759 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -409,13 +409,13 @@ columns.types string:string:string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/dst_union22_delta + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dst_union22_delta name dst_union22_delta partition_columns ds serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1281478813 + transient_lastDdlTime 1282867759 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dst_union22_delta name: dst_union22_delta @@ -426,7 +426,7 @@ partition: ds 2 replace: true - source: pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/scratchdir/hive_2010-08-10_15-20-19_225_1224246482641447263/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-09-26_366_7362686340086914056/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -436,16 +436,16 @@ columns.types string:string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/test/data/warehouse/dst_union22 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dst_union22 name dst_union22 partition_columns ds serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1281478813 + transient_lastDdlTime 1282867758 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dst_union22 - tmp directory: pfile:/data/users/heyongqiang/hive-trunk-clean/build/ql/scratchdir/hive_2010-08-10_15-20-19_225_1224246482641447263/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_17-09-26_366_7362686340086914056/-ext-10001 PREHOOK: query: insert overwrite table dst_union22 partition (ds='2') @@ -460,8 +460,8 @@ ) subq PREHOOK: type: QUERY +PREHOOK: Input: default@dst_union22@ds=1 PREHOOK: Input: default@dst_union22_delta@ds=1 -PREHOOK: Input: default@dst_union22@ds=1 PREHOOK: Output: default@dst_union22@ds=2 POSTHOOK: query: insert overwrite table dst_union22 partition (ds='2') select * from @@ -475,8 +475,8 @@ ) subq POSTHOOK: type: QUERY +POSTHOOK: Input: default@dst_union22@ds=1 POSTHOOK: Input: default@dst_union22_delta@ds=1 -POSTHOOK: Input: default@dst_union22@ds=1 POSTHOOK: Output: default@dst_union22@ds=2 POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -495,11 +495,11 @@ PREHOOK: query: select * from dst_union22 where ds = '2' order by k1 PREHOOK: type: QUERY PREHOOK: Input: default@dst_union22@ds=2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-08-10_15-20-24_642_7811595587149170257/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-09-33_354_7230169091871487696/-mr-10000 POSTHOOK: query: select * from dst_union22 where ds = '2' order by k1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dst_union22@ds=2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-08-10_15-20-24_642_7811595587149170257/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-09-33_354_7230169091871487696/-mr-10000 POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k3 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/load_dyn_part1.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part1.q.out (revision 990244) +++ ql/src/test/results/clientpositive/load_dyn_part1.q.out (working copy) @@ -25,7 +25,7 @@ ds string hr string -Detailed Table Information Table(tableName:nzhang_part1, dbName:default, owner:null, createTime:1282245001, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, 
type:string, comment:default)], location:pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/nzhang_part1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{EXTERNAL=FALSE, transient_lastDdlTime=1282245001}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +Detailed Table Information Table(tableName:nzhang_part1, dbName:default, owner:null, createTime:1282864103, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/nzhang_part1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{EXTERNAL=FALSE, transient_lastDdlTime=1282864103}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) PREHOOK: query: explain from srcpart insert overwrite table nzhang_part1 partition (ds, hr) select key, value, ds, hr where ds <= '2008-04-08' @@ -109,7 +109,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-10-01_577_4226133645559107864/-ext-10000 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-08-24_132_8350648338748839361/-ext-10000 Stage: Stage-0 Move Operator @@ -127,7 +127,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-10-01_577_4226133645559107864/-ext-10004 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-08-24_132_8350648338748839361/-ext-10004 File Output Operator compressed: false GlobalTableId: 0 @@ -144,7 +144,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-10-01_577_4226133645559107864/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-08-24_132_8350648338748839361/-ext-10002 Stage: Stage-1 Move Operator @@ -162,7 +162,7 @@ Stage: Stage-6 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-19_12-10-01_577_4226133645559107864/-ext-10005 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_16-08-24_132_8350648338748839361/-ext-10005 File Output Operator compressed: false GlobalTableId: 0 @@ -189,10 +189,10 @@ POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 +POSTHOOK: Output: 
default@nzhang_part1@ds=2008-04-08/hr=11 +POSTHOOK: Output: default@nzhang_part1@ds=2008-04-08/hr=12 POSTHOOK: Output: default@nzhang_part2@ds=2008-12-31/hr=11 POSTHOOK: Output: default@nzhang_part2@ds=2008-12-31/hr=12 -POSTHOOK: Output: default@nzhang_part1@ds=2008-04-08/hr=11 -POSTHOOK: Output: default@nzhang_part1@ds=2008-04-08/hr=12 POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] @@ -233,12 +233,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=11 PREHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=12 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-10-12_450_5318045926499588659/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-08-33_285_7916313488954403803/-mr-10000 POSTHOOK: query: select * from nzhang_part1 where ds is not null and hr is not null POSTHOOK: type: QUERY POSTHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=11 POSTHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=12 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-10-12_450_5318045926499588659/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-08-33_285_7916313488954403803/-mr-10000 POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] @@ -1251,12 +1251,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=11 PREHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=12 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-10-12_966_4486180906903303996/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-08-33_856_4074078546892062221/-mr-10000 POSTHOOK: query: select * from nzhang_part2 where ds is not null and hr is not null POSTHOOK: type: QUERY POSTHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=11 POSTHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=12 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-19_12-10-12_966_4486180906903303996/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-08-33_856_4074078546892062221/-mr-10000 POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join_reorder2.q.out =================================================================== --- ql/src/test/results/clientpositive/join_reorder2.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join_reorder2.q.out (working copy) @@ -178,21 +178,21 @@ JOIN T3 c ON b.key = c.key JOIN T4 d ON c.key = d.key 
PREHOOK: type: QUERY -PREHOOK: Input: default@t4 +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-03-39_475_5061641210355834290/-mr-10000 +PREHOOK: Input: default@t4 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-28_530_6753349918845883529/-mr-10000 POSTHOOK: query: SELECT /*+ STREAMTABLE(a) */ * FROM T1 a JOIN T2 b ON a.key = b.key JOIN T3 c ON b.key = c.key JOIN T4 d ON c.key = d.key POSTHOOK: type: QUERY -POSTHOOK: Input: default@t4 +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-03-39_475_5061641210355834290/-mr-10000 +POSTHOOK: Input: default@t4 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-28_530_6753349918845883529/-mr-10000 2 12 2 22 2 12 2 12 PREHOOK: query: EXPLAIN SELECT /*+ STREAMTABLE(a) */ * @@ -411,19 +411,19 @@ JOIN T3 c ON a.val = c.val JOIN T4 d ON a.key + 1 = d.key + 1 PREHOOK: type: QUERY -PREHOOK: Input: default@t4 +PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Input: default@t1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-03-45_374_8957410892146875864/-mr-10000 +PREHOOK: Input: default@t4 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-33_450_777508846599090366/-mr-10000 POSTHOOK: query: SELECT /*+ STREAMTABLE(a) */ * FROM T1 a JOIN T2 b ON a.key = b.key JOIN T3 c ON a.val = c.val JOIN T4 d ON a.key + 1 = d.key + 1 POSTHOOK: type: QUERY -POSTHOOK: Input: default@t4 +POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-03-45_374_8957410892146875864/-mr-10000 +POSTHOOK: Input: default@t4 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-06-33_450_777508846599090366/-mr-10000 2 22 2 12 2 12 2 12 Index: ql/src/test/results/clientpositive/input28.q.out =================================================================== --- ql/src/test/results/clientpositive/input28.q.out (revision 990244) +++ ql/src/test/results/clientpositive/input28.q.out (working copy) @@ -13,24 +13,24 @@ PREHOOK: query: insert overwrite table tst partition(d='2009-01-01') select tst.a, src.value from tst join src ON (tst.a = src.key) PREHOOK: type: QUERY +PREHOOK: Input: default@src PREHOOK: Input: default@tst@d=2009-01-01 -PREHOOK: Input: default@src PREHOOK: Output: default@tst@d=2009-01-01 POSTHOOK: query: insert overwrite table tst partition(d='2009-01-01') select tst.a, src.value from tst join src ON (tst.a = src.key) POSTHOOK: type: QUERY +POSTHOOK: Input: default@src POSTHOOK: Input: default@tst@d=2009-01-01 -POSTHOOK: Input: default@src POSTHOOK: Output: default@tst@d=2009-01-01 POSTHOOK: Lineage: tst PARTITION(d=2009-01-01).a SIMPLE [(tst)tst.FieldSchema(name:a, type:string, comment:null), ] POSTHOOK: Lineage: tst PARTITION(d=2009-01-01).b SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select * from tst where tst.d='2009-01-01' PREHOOK: type: QUERY PREHOOK: Input: default@tst@d=2009-01-01 -PREHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-57-14_720_7169706567620515894/-mr-10000 +PREHOOK: Output: 
file:/tmp/jsichi/hive_2010-08-26_15-51-46_845_5870944866794428279/-mr-10000 POSTHOOK: query: select * from tst where tst.d='2009-01-01' POSTHOOK: type: QUERY POSTHOOK: Input: default@tst@d=2009-01-01 -POSTHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-57-14_720_7169706567620515894/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-51-46_845_5870944866794428279/-mr-10000 POSTHOOK: Lineage: tst PARTITION(d=2009-01-01).a SIMPLE [(tst)tst.FieldSchema(name:a, type:string, comment:null), ] POSTHOOK: Lineage: tst PARTITION(d=2009-01-01).b SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/smb_mapjoin_4.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_4.q.out (revision 990244) +++ ql/src/test/results/clientpositive/smb_mapjoin_4.q.out (working copy) @@ -107,16 +107,16 @@ PREHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-16_409_5138180854708940810/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-09_324_5003697422621652815/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-16_409_5138180854708940810/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-09_324_5003697422621652815/-mr-10000 PREHOOK: query: explain select /*+mapjoin(a,b)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY @@ -196,16 +196,16 @@ PREHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-21_597_3057219238806085881/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-12_911_4454279447652036446/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-21_597_3057219238806085881/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-12_911_4454279447652036446/-mr-10000 PREHOOK: query: explain select /*+mapjoin(a,b)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY @@ -285,16 +285,16 @@ 
PREHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-29_753_1380615669511069311/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-16_467_5197011805004822874/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-29_753_1380615669511069311/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-16_467_5197011805004822874/-mr-10000 1 val_1 NULL NULL NULL NULL 3 val_3 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL @@ -379,16 +379,16 @@ PREHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-35_571_1378309731459967102/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-20_045_1628227846316651581/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-35_571_1378309731459967102/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-20_045_1628227846316651581/-mr-10000 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL 10 val_10 NULL NULL NULL NULL 17 val_17 @@ -474,16 +474,16 @@ PREHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-41_533_2015841460506976627/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-23_706_8199432227541064820/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-41_533_2015841460506976627/-mr-10000 +POSTHOOK: 
Output: file:/tmp/jsichi/hive_2010-08-26_16-42-23_706_8199432227541064820/-mr-10000 1 val_1 NULL NULL NULL NULL 3 val_3 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL @@ -574,16 +574,16 @@ PREHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-46_768_5503417338251350024/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-27_290_3824008948964071639/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-46_768_5503417338251350024/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-27_290_3824008948964071639/-mr-10000 NULL NULL 20 val_20 20 val_20 NULL NULL 23 val_23 23 val_23 PREHOOK: query: explain @@ -665,16 +665,16 @@ PREHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-51_754_1468604868878284906/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-30_936_5263381546938369405/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-51_754_1468604868878284906/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-30_936_5263381546938369405/-mr-10000 NULL NULL 20 val_20 20 val_20 NULL NULL 23 val_23 23 val_23 NULL NULL 25 val_25 NULL NULL @@ -758,16 +758,16 @@ PREHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-57_851_8735805432799020968/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-34_501_9167438490893243306/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: 
default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-46-57_851_8735805432799020968/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-34_501_9167438490893243306/-mr-10000 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL 10 val_10 NULL NULL NULL NULL 17 val_17 @@ -853,16 +853,16 @@ PREHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-02_882_8778606981409240289/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-38_051_4511190830431439223/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-02_882_8778606981409240289/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-38_051_4511190830431439223/-mr-10000 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL 10 val_10 NULL NULL NULL NULL 17 val_17 @@ -950,16 +950,16 @@ PREHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-07_985_4686365613476850781/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-41_627_6765762965982547245/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-07_985_4686365613476850781/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-41_627_6765762965982547245/-mr-10000 NULL NULL 20 val_20 20 val_20 NULL NULL 23 val_23 23 val_23 PREHOOK: query: explain @@ -1041,16 +1041,16 @@ PREHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-13_919_3840473410453589647/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-45_184_5223207976717364002/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key left outer join 
smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-13_919_3840473410453589647/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-45_184_5223207976717364002/-mr-10000 1 val_1 NULL NULL NULL NULL 3 val_3 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL @@ -1139,16 +1139,16 @@ PREHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-19_875_2707544952287739378/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-48_743_7572889603110654998/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-19_875_2707544952287739378/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-48_743_7572889603110654998/-mr-10000 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL 10 val_10 NULL NULL NULL NULL 17 val_17 @@ -1234,16 +1234,16 @@ PREHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key PREHOOK: type: QUERY +PREHOOK: Input: default@smb_bucket_1 PREHOOK: Input: default@smb_bucket_2 PREHOOK: Input: default@smb_bucket_3 -PREHOOK: Input: default@smb_bucket_1 -PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-29_028_7569403133068144512/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-52_303_1702164061537345426/-mr-10000 POSTHOOK: query: select /*+mapjoin(a,b)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key POSTHOOK: type: QUERY +POSTHOOK: Input: default@smb_bucket_1 POSTHOOK: Input: default@smb_bucket_2 POSTHOOK: Input: default@smb_bucket_3 -POSTHOOK: Input: default@smb_bucket_1 -POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-07-29_13-47-29_028_7569403133068144512/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_16-42-52_303_1702164061537345426/-mr-10000 1 val_1 NULL NULL NULL NULL 3 val_3 NULL NULL NULL NULL 4 val_4 NULL NULL NULL NULL Index: ql/src/test/results/clientpositive/combine2.q.out =================================================================== --- ql/src/test/results/clientpositive/combine2.q.out (revision 990244) +++ ql/src/test/results/clientpositive/combine2.q.out (working copy) @@ -21,7 +21,6 @@ select key, '2010-04-21 09:45:00' value from src where key = 19) s POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@combine2@value=| POSTHOOK: Output: default@combine2@value=2010-04-21 09%3A45%3A00 POSTHOOK: 
Output: default@combine2@value=val_0 POSTHOOK: Output: default@combine2@value=val_2 @@ -29,6 +28,7 @@ POSTHOOK: Output: default@combine2@value=val_5 POSTHOOK: Output: default@combine2@value=val_8 POSTHOOK: Output: default@combine2@value=val_9 +POSTHOOK: Output: default@combine2@value=| POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] @@ -135,7 +135,7 @@ PREHOOK: Input: default@combine2@value=val_8 PREHOOK: Input: default@combine2@value=val_9 PREHOOK: Input: default@combine2@value=| -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-25_11-47-25_377_8872210988132929507/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-35-28_042_8085154637044190373/-mr-10000 POSTHOOK: query: select key, value from combine2 where value is not null order by key POSTHOOK: type: QUERY POSTHOOK: Input: default@combine2@value=2010-04-21 09%3A45%3A00 @@ -146,7 +146,7 @@ POSTHOOK: Input: default@combine2@value=val_8 POSTHOOK: Input: default@combine2@value=val_9 POSTHOOK: Input: default@combine2@value=| -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-25_11-47-25_377_8872210988132929507/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-35-28_042_8085154637044190373/-mr-10000 POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] @@ -220,16 +220,16 @@ type: bigint Needs Tagging: false Path -> Alias: - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=2010-04-21 09%3A45%3A00 [combine2] - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=val_0 [combine2] - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=val_2 [combine2] - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=val_4 [combine2] - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=val_5 [combine2] - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=val_8 [combine2] - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=val_9 [combine2] - 
pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=| [combine2] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=2010-04-21 09%3A45%3A00 [combine2] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_0 [combine2] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_2 [combine2] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_4 [combine2] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_5 [combine2] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_8 [combine2] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_9 [combine2] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=| [combine2] Path -> Partition: - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=2010-04-21 09%3A45%3A00 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=2010-04-21 09%3A45%3A00 Partition base file name: value=2010-04-21 09%3A45%3A00 input format: org.apache.hadoop.mapred.TextInputFormat @@ -242,13 +242,13 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -259,17 +259,17 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: combine2 name: combine2 - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=val_0 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_0 Partition base file name: value=val_0 input format: org.apache.hadoop.mapred.TextInputFormat @@ -282,13 +282,13 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + 
transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -299,17 +299,17 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: combine2 name: combine2 - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=val_2 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_2 Partition base file name: value=val_2 input format: org.apache.hadoop.mapred.TextInputFormat @@ -322,13 +322,13 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -339,17 +339,17 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: combine2 name: combine2 - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=val_4 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_4 Partition base file name: value=val_4 input format: org.apache.hadoop.mapred.TextInputFormat @@ -362,13 +362,13 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -379,17 +379,17 @@ columns.types string 
file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: combine2 name: combine2 - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=val_5 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_5 Partition base file name: value=val_5 input format: org.apache.hadoop.mapred.TextInputFormat @@ -402,13 +402,13 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -419,17 +419,17 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: combine2 name: combine2 - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=val_8 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_8 Partition base file name: value=val_8 input format: org.apache.hadoop.mapred.TextInputFormat @@ -442,13 +442,13 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -459,17 +459,17 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: combine2 name: combine2 - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=val_9 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_9 Partition base file name: value=val_9 input format: org.apache.hadoop.mapred.TextInputFormat @@ -482,13 +482,13 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -499,17 +499,17 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: combine2 name: combine2 - pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2/value=| + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2/value=| Partition base file name: value=| input format: org.apache.hadoop.mapred.TextInputFormat @@ -522,13 +522,13 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -539,13 +539,13 @@ columns.types string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/jssarma/hive_trunk/build/ql/test/data/warehouse/combine2 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/combine2 name combine2 partition_columns value 
serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1280083638 + transient_lastDdlTime 1282862120 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: combine2 name: combine2 @@ -564,7 +564,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/jssarma/hive_2010-07-25_11-47-29_723_4947781089455768746/-ext-10001 + directory: file:/tmp/jsichi/hive_2010-08-26_15-35-34_459_5389863301551402910/-ext-10001 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -591,7 +591,7 @@ PREHOOK: Input: default@combine2@value=val_8 PREHOOK: Input: default@combine2@value=val_9 PREHOOK: Input: default@combine2@value=| -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-25_11-47-29_909_8198850586017643302/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-35-34_737_7254387281263806727/-mr-10000 POSTHOOK: query: select count(1) from combine2 where value is not null POSTHOOK: type: QUERY POSTHOOK: Input: default@combine2@value=2010-04-21 09%3A45%3A00 @@ -602,7 +602,7 @@ POSTHOOK: Input: default@combine2@value=val_8 POSTHOOK: Input: default@combine2@value=val_9 POSTHOOK: Input: default@combine2@value=| -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-25_11-47-29_909_8198850586017643302/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-35-34_737_7254387281263806727/-mr-10000 POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] @@ -709,14 +709,14 @@ PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-25_11-47-34_373_2325908341143034034/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-35-42_237_8954112902391689238/-mr-10000 POSTHOOK: query: select ds, count(1) from srcpart where ds is not null group by ds POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-25_11-47-34_373_2325908341143034034/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-35-42_237_8954112902391689238/-mr-10000 POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), 
(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join32.q.out =================================================================== --- ql/src/test/results/clientpositive/join32.q.out (revision 990244) +++ ql/src/test/results/clientpositive/join32.q.out (working copy) @@ -48,7 +48,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/nzhang/hive_2010-08-17_22-54-34_305_5971089623035702886/-mr-10003 + directory: file:/tmp/jsichi/hive_2010-08-26_15-59-39_400_2108121170071807592/-mr-10003 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -84,7 +84,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/nzhang/hive_2010-08-17_22-54-34_305_5971089623035702886/-mr-10003 + directory: file:/tmp/jsichi/hive_2010-08-26_15-59-39_400_2108121170071807592/-mr-10003 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -97,9 +97,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/src [y] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src [y] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/src + pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -110,12 +110,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110633 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -126,12 +126,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/src + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110633 + transient_lastDdlTime 1282861477 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src @@ -139,7 +139,7 @@ Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: - file:/tmp/nzhang/hive_2010-08-17_22-54-34_305_5971089623035702886/-mr-10003 + file:/tmp/jsichi/hive_2010-08-26_15-59-39_400_2108121170071807592/-mr-10003 Select Operator expressions: expr: _col0 @@ -182,7 +182,7 @@ File Output Operator compressed: false GlobalTableId: 1 - 
directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-34_305_5971089623035702886/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-59-39_400_2108121170071807592/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -193,12 +193,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110874 + transient_lastDdlTime 1282863579 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -261,7 +261,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-34_305_5971089623035702886/-ext-10002 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-59-39_400_2108121170071807592/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -272,21 +272,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110874 + transient_lastDdlTime 1282863579 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - file:/tmp/nzhang/hive_2010-08-17_22-54-34_305_5971089623035702886/-mr-10003 [file:/tmp/nzhang/hive_2010-08-17_22-54-34_305_5971089623035702886/-mr-10003] + file:/tmp/jsichi/hive_2010-08-26_15-59-39_400_2108121170071807592/-mr-10003 [file:/tmp/jsichi/hive_2010-08-26_15-59-39_400_2108121170071807592/-mr-10003] Path -> Partition: - file:/tmp/nzhang/hive_2010-08-17_22-54-34_305_5971089623035702886/-mr-10003 + file:/tmp/jsichi/hive_2010-08-26_15-59-39_400_2108121170071807592/-mr-10003 Partition base file name: -mr-10003 input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -310,14 +310,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-34_305_5971089623035702886/-ext-10002 - destination: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-34_305_5971089623035702886/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-59-39_400_2108121170071807592/-ext-10002 + destination: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-59-39_400_2108121170071807592/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: 
pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-34_305_5971089623035702886/-ext-10000 + source: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-59-39_400_2108121170071807592/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -327,24 +327,24 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110874 + transient_lastDdlTime 1282863579 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-34_305_5971089623035702886/-ext-10001 + tmp directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-59-39_400_2108121170071807592/-ext-10001 Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-34_305_5971089623035702886/-ext-10002 + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-59-39_400_2108121170071807592/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-34_305_5971089623035702886/-ext-10000 + directory: pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-59-39_400_2108121170071807592/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -355,21 +355,21 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110874 + transient_lastDdlTime 1282863579 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-34_305_5971089623035702886/-ext-10002 [pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-34_305_5971089623035702886/-ext-10002] + pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-59-39_400_2108121170071807592/-ext-10002 [pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-59-39_400_2108121170071807592/-ext-10002] Path -> Partition: - pfile:/data/users/nzhang/work/870/apache-hive/build/ql/scratchdir/hive_2010-08-17_22-54-34_305_5971089623035702886/-ext-10002 + 
pfile:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-08-26_15-59-39_400_2108121170071807592/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -380,12 +380,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110874 + transient_lastDdlTime 1282863579 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -396,12 +396,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/nzhang/work/870/apache-hive/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1282110874 + transient_lastDdlTime 1282863579 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -412,18 +412,18 @@ FROM src1 x JOIN src y ON (x.key = y.key) JOIN srcpart z ON (x.value = z.value and z.ds='2008-04-08' and z.hr=11) PREHOOK: type: QUERY -PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@src PREHOOK: Input: default@src1 +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Output: default@dest_j1 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(x,z) */ x.key, z.value, y.value FROM src1 x JOIN src y ON (x.key = y.key) JOIN srcpart z ON (x.value = z.value and z.ds='2008-04-08' and z.hr=11) POSTHOOK: type: QUERY -POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Output: default@dest_j1 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 EXPRESSION [(src)y.FieldSchema(name:value, type:string, comment:default), ] @@ -431,11 +431,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-54-42_867_3987412425334906205/-mr-10000 +PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-59-45_924_802354274907767943/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/nzhang/hive_2010-08-17_22-54-42_867_3987412425334906205/-mr-10000 +POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_15-59-45_924_802354274907767943/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 EXPRESSION [(src)y.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE 
[(srcpart)z.FieldSchema(name:value, type:string, comment:default), ]
Index: ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java (revision 990244)
+++ ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java (working copy)
@@ -109,12 +109,8 @@
       console.printError("POSTHOOK: type: " + sess.getCommandType());
     }
 
-    for (ReadEntity re : inputs) {
-      console.printError("POSTHOOK: Input: " + re.toString());
-    }
-    for (WriteEntity we : outputs) {
-      console.printError("POSTHOOK: Output: " + we.toString());
-    }
+    PreExecutePrinter.printEntities(console, inputs, "POSTHOOK: Input: ");
+    PreExecutePrinter.printEntities(console, outputs, "POSTHOOK: Output: ");
 
     // Also print out the generic lineage information if there is any
     if (linfo != null) {
Index: ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java (revision 990244)
+++ ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java (working copy)
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.hive.ql.hooks;
 
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
 import java.util.Set;
 
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -46,12 +49,18 @@
       console.printError("PREHOOK: type: " + sess.getCommandType());
     }
 
-    for (ReadEntity re : inputs) {
-      console.printError("PREHOOK: Input: " + re.toString());
+    printEntities(console, inputs, "PREHOOK: Input: ");
+    printEntities(console, outputs, "PREHOOK: Output: ");
+  }
+
+  static void printEntities(LogHelper console, Set entities, String prefix) {
+    List strings = new ArrayList();
+    for (Object o : entities) {
+      strings.add(o.toString());
     }
-    for (WriteEntity we : outputs) {
-      console.printError("PREHOOK: Output: " + we.toString());
+    Collections.sort(strings);
+    for (String s : strings) {
+      console.printError(prefix + s);
     }
   }
-
 }
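The churn in the .q.out golden files above follows directly from the hook change at the end of this patch: PreExecutePrinter.printEntities now copies the entity names out of the input/output Sets, sorts them, and prints them with the PREHOOK:/POSTHOOK: prefix, so the Input:/Output: lines land in a stable alphabetical order instead of hash-set iteration order. Below is a minimal standalone sketch of that idea; the SortedEntityPrinterDemo class name, the LinkedHashSet seeding, and the use of System.err in place of Hive's SessionState LogHelper are illustrative assumptions, not part of the patch.

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Standalone sketch of the sorting idea behind printEntities: entity names are
// copied out of the (unordered) Set, sorted, and only then printed, so the
// PREHOOK/POSTHOOK Input/Output lines in the golden .q.out files no longer
// depend on hash iteration order. The class name, LinkedHashSet, and
// System.err are illustrative only; the hooks print via console.printError.
public class SortedEntityPrinterDemo {

  static void printEntities(Set<?> entities, String prefix) {
    List<String> strings = new ArrayList<String>();
    for (Object o : entities) {
      strings.add(o.toString());      // e.g. "default@smb_bucket_2"
    }
    Collections.sort(strings);        // deterministic order across runs
    for (String s : strings) {
      System.err.println(prefix + s);
    }
  }

  public static void main(String[] args) {
    // Insertion order mimics the old, non-deterministic output seen in the diffs.
    Set<String> inputs = new LinkedHashSet<String>();
    inputs.add("default@smb_bucket_2");
    inputs.add("default@smb_bucket_3");
    inputs.add("default@smb_bucket_1");

    // Prints smb_bucket_1, then smb_bucket_2, then smb_bucket_3 -- the order
    // now baked into the updated .q.out files above.
    printEntities(inputs, "PREHOOK: Input: ");
  }
}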