Index: eclipse-templates/TestNegativeCliDriver.launchtemplate =================================================================== --- eclipse-templates/TestNegativeCliDriver.launchtemplate (revision 0) +++ eclipse-templates/TestNegativeCliDriver.launchtemplate (revision 0) @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + + Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java =================================================================== --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 801363) +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy) @@ -155,7 +155,6 @@ HIVEOPTCP("hive.optimize.cp", true), // column pruner HIVEOPTPPD("hive.optimize.ppd", true); // predicate pushdown - public final String varname; public final String defaultVal; public final int defaultIntVal; Index: ql/src/test/results/clientnegative/script_error.q.out =================================================================== --- ql/src/test/results/clientnegative/script_error.q.out (revision 801363) +++ ql/src/test/results/clientnegative/script_error.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -40,5 +42,5 @@ query: SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue) FROM src Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/1098616602/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2091756058/10000 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask Index: ql/src/test/results/clientpositive/order.q.out =================================================================== --- ql/src/test/results/clientpositive/order.q.out (revision 801363) +++ ql/src/test/results/clientpositive/order.q.out (working copy) @@ -12,6 +12,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Select Operator expressions: expr: key @@ -47,7 +49,7 @@ query: SELECT x.* FROM SRC x ORDER BY key limit 10 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1475617872/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/204661322/10000 0 val_0 0 val_0 0 val_0 @@ -72,6 +74,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Select Operator expressions: expr: key @@ -107,7 +111,7 @@ query: SELECT x.* FROM SRC x ORDER BY key desc limit 10 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/111457087/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1898497875/10000 98 val_98 98 val_98 97 val_97 Index: ql/src/test/results/clientpositive/no_hooks.q.out =================================================================== --- ql/src/test/results/clientpositive/no_hooks.q.out (revision 801363) +++ ql/src/test/results/clientpositive/no_hooks.q.out (working copy) @@ -11,6 +11,8 @@ Map Reduce Alias -> Map Operator Tree: src2 + TableScan + alias: src2 Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(10)) @@ -24,6 +26,8 @@ expr: value type: string src1 + TableScan + alias: src1 Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(10)) @@ -69,7 +73,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/553392584/10002 + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1694927557/10002 Reduce Output Operator key expressions: expr: _col0 Index: ql/src/test/results/clientpositive/udf_hash.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_hash.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_hash.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: hash(UDFToByte(1)) @@ -62,5 +64,5 @@ hash(1, 2, 3) FROM src LIMIT 1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/670266340/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/378731640/10000 1 2 3 -1097262584 1067450368 1076887552 51508 96354 1 0 1026 Index: ql/src/test/results/clientpositive/join6.q.out =================================================================== --- ql/src/test/results/clientpositive/join6.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join6.q.out (working copy) @@ -25,6 +25,8 @@ Map Reduce Alias -> Map Operator Tree: c:a:src1 + TableScan + alias: src1 Filter Operator predicate: expr: ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20))) @@ -55,6 +57,8 @@ expr: _col1 type: string c:b:src2 + TableScan + alias: src2 Filter Operator predicate: expr: ((UDFToDouble(key) > UDFToDouble(15)) and (UDFToDouble(key) < UDFToDouble(25))) @@ -162,7 +166,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2039611986/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1349249596/10000 11 val_11 NULL NULL 12 val_12 NULL NULL 12 val_12 NULL NULL Index: ql/src/test/results/clientpositive/ppd_join3.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_join3.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_join3.q.out (working copy) @@ -21,6 +21,8 @@ Map Reduce Alias -> Map Operator Tree: src2:src + TableScan + alias: src Filter Operator predicate: expr: ((key <> '12') and (key <> '4')) @@ -51,6 +53,8 @@ expr: _col1 type: string src1:src + TableScan + alias: src Filter Operator predicate: expr: (((key <> '11') and (key < '400')) and ((key > '0') and ((value <> 'val_500') or (key > '1')))) @@ -85,6 +89,8 @@ expr: _col1 type: string src3:src + TableScan + alias: src Filter Operator predicate: expr: ((key <> '13') and (key <> '1')) @@ -154,7 +160,7 @@ ON src1.c1 = src3.c5 WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1') Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/47779120/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1265462406/10000 100 val_100 100 val_100 100 val_100 Index: ql/src/test/results/clientpositive/union11.q.out =================================================================== --- ql/src/test/results/clientpositive/union11.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union11.q.out (working copy) @@ -23,6 +23,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery1:unionsrc-subquery1-subquery1:s1 + TableScan + alias: s1 Select Operator Group By Operator aggregations: @@ -58,7 +60,7 @@ Stage: Stage-2 Map Reduce Alias -> Map 
Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/399328241/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/204278214/10002 Union Select Operator expressions: @@ -85,7 +87,7 @@ value expressions: expr: _col1 type: bigint - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/399328241/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/204278214/10003 Union Select Operator expressions: @@ -112,7 +114,7 @@ value expressions: expr: _col1 type: bigint - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/399328241/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/204278214/10004 Union Select Operator expressions: @@ -166,6 +168,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery2:unionsrc-subquery1-subquery2:s2 + TableScan + alias: s2 Select Operator Group By Operator aggregations: @@ -202,6 +206,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:unionsrc-subquery2:s3 + TableScan + alias: s3 Select Operator Group By Operator aggregations: @@ -245,7 +251,7 @@ UNION ALL select 'tst3' as key, count(1) as value from src s3) unionsrc group by unionsrc.key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1673535068/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/225927230/10000 tst1 1 tst2 1 tst3 1 Index: ql/src/test/results/clientpositive/ppd_union.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_union.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_union.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unioned_query-subquery1:src + TableScan + alias: src Filter Operator predicate: expr: ((key < '100') and ((key > '4') and (value > 'val_4'))) @@ -52,6 +54,8 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat null-subquery2:unioned_query-subquery2:src + TableScan + alias: src Filter Operator predicate: expr: ((key > '150') and ((key > '4') and (value > 'val_4'))) @@ -99,7 +103,7 @@ SELECT unioned_query.* WHERE key > '4' and value > 'val_4' Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1644693517/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1828690205/10000 86 val_86 409 val_409 98 val_98 Index: ql/src/test/results/clientpositive/udf1.q.out =================================================================== --- ql/src/test/results/clientpositive/udf1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf1.q.out (working copy) @@ -24,6 +24,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(86)) @@ -90,10 +92,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/emil/hive1/hive1/build/ql/tmp/1012030734/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2016734734/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/emil/hive1/hive1/build/ql/tmp/431659275/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/519901764/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -174,5 +176,5 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: 
file:/data/users/emil/hive1/hive1/build/ql/tmp/220608265/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/979014828/10000 true false true true true false false false true true false true false acc abc abb hive hadoop AaAbAcA false Index: ql/src/test/results/clientpositive/union19.q.out =================================================================== --- ql/src/test/results/clientpositive/union19.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union19.q.out (working copy) @@ -28,6 +28,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unionsrc-subquery1:s1 + TableScan + alias: s1 Select Operator Group By Operator aggregations: @@ -63,7 +65,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/626825337/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2105547105/10004 Union Select Operator expressions: @@ -109,7 +111,7 @@ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest2 - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/626825337/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2105547105/10005 Union Select Operator expressions: @@ -204,6 +206,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:unionsrc-subquery2:s2 + TableScan + alias: s2 Select Operator expressions: expr: key @@ -229,7 +233,7 @@ Output: default/dest2 query: SELECT DEST1.* FROM DEST1 SORT BY DEST1.key, DEST1.value Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/190751491/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/78068858/10000 0 3 10 1 100 2 @@ -542,7 +546,7 @@ tst1 1 query: SELECT DEST2.* FROM DEST2 SORT BY DEST2.key, DEST2.val1, DEST2.val2 Input: default/dest2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1215563636/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/240933418/10000 0 val_0 val_0 0 val_0 val_0 0 val_0 val_0 Index: ql/src/test/results/clientpositive/join12.q.out =================================================================== --- ql/src/test/results/clientpositive/join12.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join12.q.out (working copy) @@ -20,6 +20,8 @@ Map Reduce Alias -> Map Operator Tree: src2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -40,6 +42,8 @@ expr: _col1 type: string src1:src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(100)) @@ -66,6 +70,8 @@ expr: _col0 type: string src3:src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(80)) @@ -127,7 +133,7 @@ (SELECT src.key as c5, src.value as c6 from src) src3 ON src1.c1 = src3.c5 AND src3.c5 < 80 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/23536302/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1418897079/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/input32.q.out =================================================================== --- ql/src/test/results/clientpositive/input32.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input32.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: srcbucket + TableScan + alias: srcbucket 
Select Operator Group By Operator aggregations: @@ -71,7 +73,7 @@ Output: default/tst_dest32 query: select * from tst_dest32 Input: default/tst_dest32 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/54515043/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1142748652/10000 1000 query: drop table tst_dest32 query: drop table dest32 Index: ql/src/test/results/clientpositive/join35.q.out =================================================================== --- ql/src/test/results/clientpositive/join35.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join35.q.out (working copy) @@ -27,11 +27,15 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:subq1-subquery1:x + TableScan + alias: x Filter Operator + isSamplingPred: false predicate: expr: (UDFToDouble(key) < UDFToDouble(20)) type: boolean Filter Operator + isSamplingPred: false predicate: expr: (UDFToDouble(key) < UDFToDouble(20)) type: boolean @@ -62,9 +66,9 @@ type: bigint Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -79,7 +83,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src Reduce Operator Tree: @@ -101,7 +105,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10002 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -113,7 +117,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10002 Union Common Join Operator condition map: @@ -156,7 +160,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10003 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10003 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -170,10 +174,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10004 Union Common Join Operator condition map: @@ -216,7 +220,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10003 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10003 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -230,7 +234,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 Local Work: @@ -241,6 +245,8 @@ limit: -1 Alias -> Map Local Operator Tree: x + TableScan + alias: x Common Join Operator condition map: Inner Join 0 to 1 @@ -282,7 +288,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10003 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10003 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -296,15 +302,15 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10002 - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10004 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10002 Partition input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -313,7 +319,7 @@ columns _col0,_col1 columns.types string,bigint escape.delim \ - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10004 Partition input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -329,11 +335,11 @@ Move Operator files: hdfs directory: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10003 - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1741920959/10000 + source: 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10003 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1251033308/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10003 Reduce Output Operator sort order: Map-reduce partition columns: @@ -349,9 +355,9 @@ type: int Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10003 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10003 Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -366,7 +372,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 Reduce Operator Tree: @@ -374,7 +380,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1741920959/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1251033308/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -387,7 +393,7 @@ bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 @@ -396,7 +402,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1741920959/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1251033308/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -410,20 +416,24 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1741920959/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1251033308/10001 Stage: Stage-6 Map Reduce Alias -> Map Operator Tree: 
null-subquery2:subq1-subquery2:x1 + TableScan + alias: x1 Filter Operator + isSamplingPred: false predicate: expr: (UDFToDouble(key) > UDFToDouble(100)) type: boolean Filter Operator + isSamplingPred: false predicate: expr: (UDFToDouble(key) > UDFToDouble(100)) type: boolean @@ -454,9 +464,9 @@ type: bigint Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -471,7 +481,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src Reduce Operator Tree: @@ -493,7 +503,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1365573442/10004 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1727048194/10004 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -516,7 +526,7 @@ Output: default/dest_j1 query: select * from dest_j1 x order by x.key Input: default/dest_j1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/675600267/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1156663841/10000 128 3 146 val_146 2 150 val_150 1 Index: ql/src/test/results/clientpositive/udf9.q.out =================================================================== --- ql/src/test/results/clientpositive/udf9.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf9.q.out (working copy) @@ -20,6 +20,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(86)) @@ -85,5 +87,5 @@ DATE_SUB('2007-02-28', 365), DATE_SUB('2007-02-28 01:12:34', 730) FROM src WHERE src.key = 86 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/2009678608/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/174840493/10000 -1 2 32 -1 2009-01-01 2009-12-31 2008-03-01 2009-03-02 2008-02-28 2009-02-27 2008-12-31 2008-01-02 2008-02-28 2009-02-27 2006-02-28 2005-02-28 Index: ql/src/test/results/clientpositive/input17.q.out =================================================================== --- ql/src/test/results/clientpositive/input17.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input17.q.out (working copy) @@ -19,6 +19,8 @@ Map Reduce Alias -> Map Operator Tree: tmap:src_thrift + TableScan + alias: src_thrift Select Operator expressions: expr: (aint + lint[0]) @@ -92,7 +94,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/367319018/10000 +Output: 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2071608019/10000 NULL null -1461153966 {"myint":49,"mystring":"343","underscore_int":7} -1952710705 {"myint":25,"mystring":"125","underscore_int":5} Index: ql/src/test/results/clientpositive/groupby1.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby1.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -52,7 +54,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - invalidscheme:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/2040523337/10002 + invalidscheme:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1292130777/10002 Reduce Output Operator key expressions: expr: _col0 @@ -113,7 +115,7 @@ Output: default/dest_g1 query: SELECT dest_g1.* FROM dest_g1 Input: default/dest_g1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/2011356665/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/216605822/10000 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/udf_case.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_case.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_case.q.out (working copy) @@ -37,6 +37,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: CASE (1) WHEN (1) THEN (2) WHEN (3) THEN (4) ELSE (5) END @@ -92,5 +94,5 @@ END FROM src LIMIT 1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1363344464/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1655100692/10000 2 5 15 NULL 20 24 Index: ql/src/test/results/clientpositive/union4.q.out =================================================================== --- ql/src/test/results/clientpositive/union4.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union4.q.out (working copy) @@ -25,6 +25,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unionsrc-subquery1:s1 + TableScan + alias: s1 Select Operator Group By Operator aggregations: @@ -60,7 +62,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1477953430/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/793944135/10002 Union Select Operator expressions: @@ -84,7 +86,7 @@ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1477953430/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/793944135/10004 Union Select Operator expressions: @@ -115,10 +117,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/581712304/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1820922097/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1477953430/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/793944135/10003 Reduce Output Operator sort order: Map-reduce partition columns: @@ 
-155,6 +157,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:unionsrc-subquery2:s2 + TableScan + alias: s2 Select Operator Group By Operator aggregations: @@ -196,7 +200,7 @@ Output: default/tmptable query: select * from tmptable x sort by x.key Input: default/tmptable -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1149934925/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1401758833/10000 tst1 500 tst2 500 query: drop table tmptable Index: ql/src/test/results/clientpositive/ppd_clusterby.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_clusterby.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_clusterby.q.out (working copy) @@ -12,6 +12,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(10)) @@ -57,7 +59,7 @@ query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/146577013/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/661923183/10000 10 val_10 query: EXPLAIN SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1 @@ -74,6 +76,8 @@ Map Reduce Alias -> Map Operator Tree: y + TableScan + alias: y Reduce Output Operator key expressions: expr: key @@ -87,6 +91,8 @@ expr: key type: string x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(20)) @@ -136,7 +142,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1806574549/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/610411732/10002 Reduce Output Operator key expressions: expr: _col1 @@ -169,5 +175,5 @@ query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/484426586/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/235995833/10000 20 val_20 20 Index: ql/src/test/results/clientpositive/groupby9.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby9.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby9.q.out (working copy) @@ -21,6 +21,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Reduce Output Operator key expressions: expr: substr(value, 5) @@ -71,7 +73,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/576845986/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2056299757/10004 Reduce Output Operator key expressions: expr: _col0 @@ -129,7 +131,7 @@ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/576845986/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2056299757/10005 Reduce Output Operator key expressions: expr: _col0 @@ -203,7 +205,7 @@ Output: default/dest2 query: SELECT DEST1.* FROM DEST1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/618560359/10000 +Output: 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1454113650/10000 0 1 10 1 100 1 @@ -515,7 +517,7 @@ 98 1 query: SELECT DEST2.* FROM DEST2 Input: default/dest2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/665362650/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1099145371/10000 0 val_0 1 10 val_10 1 100 val_100 1 Index: ql/src/test/results/clientpositive/groupby11.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby11.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby11.q.out (working copy) @@ -23,6 +23,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Reduce Output Operator key expressions: expr: key @@ -73,7 +75,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2088834380/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/353987509/10004 Reduce Output Operator key expressions: expr: _col0 @@ -140,7 +142,7 @@ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2088834380/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/353987509/10005 Reduce Output Operator key expressions: expr: _col0 @@ -215,7 +217,7 @@ Output: default/dest2/ds=111 query: SELECT * from dest1 Input: default/dest1/ds=111 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1942173858/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1308166623/10000 val_0 3 1 111 val_10 1 1 111 val_100 2 1 111 @@ -527,7 +529,7 @@ val_98 2 1 111 query: SELECT * from dest2 Input: default/dest2/ds=111 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2116311815/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/208593605/10000 0 3 1 111 10 1 1 111 100 2 1 111 Index: ql/src/test/results/clientpositive/union20.q.out =================================================================== --- ql/src/test/results/clientpositive/union20.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union20.q.out (working copy) @@ -31,6 +31,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unionsrc2-subquery1:s3 + TableScan + alias: s3 Select Operator Group By Operator aggregations: @@ -66,7 +68,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1080405211/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2007188536/10002 Union Reduce Output Operator key expressions: @@ -82,7 +84,7 @@ type: string expr: _col1 type: string - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1080405211/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2007188536/10003 Union Reduce Output Operator key expressions: @@ -143,6 +145,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:unionsrc2-subquery2:s4 + TableScan + alias: s4 Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(10)) @@ -169,6 +173,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:unionsrc1-subquery2:s2 + TableScan + alias: s2 Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(10)) @@ -194,7 +200,7 @@ Stage: Stage-5 Map Reduce Alias -> Map Operator Tree: - 
file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1080405211/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2007188536/10004 Union File Output Operator compressed: false @@ -202,7 +208,7 @@ table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1080405211/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2007188536/10006 Union File Output Operator compressed: false @@ -215,6 +221,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unionsrc1-subquery1:s1 + TableScan + alias: s1 Select Operator Group By Operator aggregations: @@ -262,7 +270,7 @@ select s4.key as key, s4.value as value from src s4 where s4.key < 10) unionsrc2 ON (unionsrc1.key = unionsrc2.key) Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1630032215/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1517739489/10000 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/mapreduce7.q.out =================================================================== --- ql/src/test/results/clientpositive/mapreduce7.q.out (revision 801363) +++ ql/src/test/results/clientpositive/mapreduce7.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -104,7 +106,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1768459634/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1462787942/10000 0 val_0 0 0 0 val_0 0 val_0 0 0 0 val_0 0 val_0 0 0 0 val_0 Index: ql/src/test/results/clientpositive/udf_when.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_when.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_when.q.out (working copy) @@ -37,6 +37,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: CASE WHEN ((1 = 1)) THEN (2) WHEN ((1 = 3)) THEN (4) ELSE (5) END @@ -92,5 +94,5 @@ END FROM src LIMIT 1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/2013202159/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/93135016/10000 2 9 14 NULL 24 NULL Index: ql/src/test/results/clientpositive/ppd_outer_join2.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_outer_join2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_outer_join2.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: b + TableScan + alias: b Filter Operator predicate: expr: ((key > '15') and (key < '25')) @@ -37,6 +39,8 @@ expr: value type: string a + TableScan + alias: a Reduce Output Operator key expressions: expr: key @@ -94,7 +98,7 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/1070507636/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1625042104/10000 150 val_150 150 val_150 152 val_152 152 val_152 152 val_152 152 
val_152 Index: ql/src/test/results/clientpositive/join21.q.out =================================================================== --- ql/src/test/results/clientpositive/join21.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join21.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src2 + TableScan + alias: src2 Filter Operator predicate: expr: (UDFToDouble(key) > UDFToDouble(10)) @@ -36,6 +38,8 @@ expr: value type: string src3 + TableScan + alias: src3 Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(10)) @@ -59,6 +63,8 @@ expr: value type: string src1 + TableScan + alias: src1 Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(10)) @@ -116,7 +122,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1067546939/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/816848024/10002 Reduce Output Operator key expressions: expr: _col0 @@ -162,7 +168,7 @@ query: SELECT * FROM src src1 LEFT OUTER JOIN src src2 ON (src1.key = src2.key AND src1.key < 10 AND src2.key > 10) RIGHT OUTER JOIN src src3 ON (src2.key = src3.key AND src3.key < 10) SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1152379238/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/743288814/10000 NULL NULL NULL NULL 0 val_0 NULL NULL NULL NULL 0 val_0 NULL NULL NULL NULL 0 val_0 Index: ql/src/test/results/clientpositive/udf_abs.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_abs.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_abs.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: abs(0) @@ -51,7 +53,7 @@ abs(9223372036854775807) FROM src LIMIT 1 Input: default/src -Output: file:/data/users/njain/hive_commit1/hive_commit1/.ptest_1/build/ql/tmp/568873688/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1827607130/10000 0 1 123 9223372036854775807 9223372036854775807 query: EXPLAIN SELECT abs(0.0), @@ -70,6 +72,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: abs(0.0) @@ -98,5 +102,5 @@ abs(3.14159265) FROM src LIMIT 1 Input: default/src -Output: file:/data/users/njain/hive_commit1/hive_commit1/.ptest_1/build/ql/tmp/1179175977/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/150146674/10000 0.0 3.14159265 3.14159265 Index: ql/src/test/results/clientpositive/groupby2_map.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby2_map.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby2_map.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -102,7 +104,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/367010437/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/907085747/10000 0 1 00.0 1 71 116414.0 2 69 225571.0 Index: ql/src/test/results/clientpositive/input26.q.out 
=================================================================== --- ql/src/test/results/clientpositive/input26.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input26.q.out (working copy) @@ -19,6 +19,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:subq-subquery1:a + TableScan + alias: a Filter Operator predicate: expr: ((ds = '2008-04-08') and (hr = '11')) @@ -66,7 +68,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/136969549/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1082108429/10002 Union Select Operator expressions: @@ -85,7 +87,7 @@ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/136969549/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1082108429/10003 Union Select Operator expressions: @@ -109,6 +111,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:subq-subquery2:b + TableScan + alias: b Filter Operator predicate: expr: ((ds = '2008-04-08') and (hr = '14')) @@ -162,7 +166,7 @@ select * from srcpart b where b.ds = '2008-04-08' and b.hr = '14' limit 5 )subq Input: default/srcpart/ds=2008-04-08/hr=11 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/116487966/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/168463886/10000 0 val_0 2008-04-08 11 0 val_0 2008-04-08 11 0 val_0 2008-04-08 11 Index: ql/src/test/results/clientpositive/groupby_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_ppr.q.out (revision 0) +++ ql/src/test/results/clientpositive/groupby_ppr.q.out (revision 0) @@ -0,0 +1,192 @@ +query: CREATE TABLE dest1(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE +query: EXPLAIN EXTENDED +FROM srcpart src +INSERT OVERWRITE TABLE dest1 +SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) +WHERE src.ds = '2008-04-08' +GROUP BY substr(src.key,1,1) +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL src) ds) '2008-04-08')) (TOK_GROUPBY (TOK_FUNCTION substr (. 
(TOK_TABLE_OR_COL src) key) 1 1)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + TableScan + alias: src + Filter Operator + isSamplingPred: false + predicate: + expr: (ds = '2008-04-08') + type: boolean + Filter Operator + isSamplingPred: false + predicate: + expr: (ds = '2008-04-08') + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: key, value + Reduce Output Operator + key expressions: + expr: substr(key, 1, 1) + type: string + expr: substr(value, 5) + type: string + sort order: ++ + Map-reduce partition columns: + expr: substr(key, 1, 1) + type: string + tag: -1 + Needs Tagging: false + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + Reduce Operator Tree: + Group By Operator + aggregations: + expr: count(DISTINCT KEY._col1) + expr: sum(KEY._col1) + keys: + expr: KEY._col0 + type: string + mode: complete + outputColumnNames: _col0, _col1, _col2 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: bigint + expr: concat(_col0, UDFToString(_col2)) + type: string + outputColumnNames: _col0, _col1, _col2 + Select Operator + expressions: + expr: _col0 + type: string + expr: UDFToInteger(_col1) + type: int + expr: _col2 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/633188587/10000 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name dest1 + columns.types string:int:string + serialization.ddl struct dest1 { string key, i32 c1, string c2} + serialization.format 1 + columns key,c1,c2 + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: true + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/633188587/10000 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name dest1 + columns.types string:int:string + serialization.ddl struct dest1 { string key, i32 c1, string c2} + serialization.format 1 + columns key,c1,c2 + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: dest1 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/633188587/10001 + + +query: FROM srcpart src +INSERT OVERWRITE TABLE dest1 +SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) +WHERE src.ds = '2008-04-08' +GROUP BY substr(src.key,1,1) +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/srcpart/ds=2008-04-08/hr=12 +Output: default/dest1 +query: SELECT dest1.* FROM dest1 +Input: default/dest1 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/561326404/10000 +0 1 00.0 +1 71 132828.0 +2 69 251142.0 +3 62 364008.0 +4 74 4105526.0 +5 6 5794.0 +6 5 6796.0 +7 6 71470.0 +8 8 81524.0 +9 7 92094.0 Index: ql/src/test/results/clientpositive/sample8.q.out =================================================================== --- ql/src/test/results/clientpositive/sample8.q.out (revision 801363) +++ ql/src/test/results/clientpositive/sample8.q.out (working copy) @@ -19,11 +19,15 @@ Map Reduce Alias -> Map Operator Tree: t + TableScan + alias: t Filter Operator + isSamplingPred: false predicate: expr: (((hash(key) & 2147483647) % 10) = 0) type: boolean Filter Operator + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 10) = 0) type: boolean @@ -36,11 +40,15 @@ expr: value type: string s + TableScan + alias: s Filter Operator + isSamplingPred: false predicate: expr: (((((((hash(key) & 2147483647) % 1) = 0) and (ds = '2008-04-08')) and (hr = '11')) and (ds = '2008-04-08')) and (hr = '11')) type: boolean Filter Operator + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 1) = 0) type: boolean @@ -58,12 +66,12 @@ type: string Needs Tagging: true Path -> Alias: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 - 
file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 Path -> Partition: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition partition values: ds 2008-04-08 @@ -82,10 +90,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition partition values: ds 2008-04-08 @@ -104,10 +112,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 Partition partition values: ds 2008-04-09 @@ -126,10 +134,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 Partition partition values: ds 2008-04-09 @@ -148,7 +156,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat 
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart Reduce Operator Tree: @@ -160,6 +168,7 @@ 1 {VALUE._col0} {VALUE._col1} outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Filter Operator + isSamplingPred: false predicate: expr: ((((((_col4 = _col0) and (_col5 = _col1)) and (_col2 = '2008-04-08')) and (_col3 = '11')) and (_col2 = '2008-04-08')) and (_col3 = '11')) type: boolean @@ -177,7 +186,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1558603137/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1665558556/10002 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -189,7 +198,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1558603137/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1665558556/10002 Reduce Output Operator key expressions: expr: _col0 @@ -214,9 +223,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1558603137/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1665558556/10002 Path -> Partition: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1558603137/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1665558556/10002 Partition input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -230,7 +239,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1558603137/10001 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1665558556/10001 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -254,7 +263,7 @@ Input: default/srcpart/ds=2008-04-08/hr=12 Input: default/srcpart/ds=2008-04-09/hr=11 Input: default/srcpart/ds=2008-04-09/hr=12 -Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1151844475/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/19992458/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/join29.q.out =================================================================== --- ql/src/test/results/clientpositive/join29.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join29.q.out (working copy) @@ -23,6 +23,8 @@ Map Reduce Alias -> Map Operator Tree: subq2:y + TableScan + alias: y Select Operator expressions: expr: key @@ -74,7 +76,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1125941767/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/907137389/10002 Common Join Operator condition map: Inner Join 0 to 1 @@ -124,11 +126,11 @@ Local Work: Map Reduce Local Work Alias -> Map Local Tables: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1125941767/10004 + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/907137389/10004 Fetch Operator limit: -1 Alias -> Map Local Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1125941767/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/907137389/10004 Common Join Operator condition map: Inner Join 0 to 1 @@ -182,10 +184,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/963830938/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/910865729/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1125941767/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/907137389/10003 Reduce Output Operator sort order: Map-reduce partition columns: @@ -224,6 +226,8 @@ Map Reduce Alias -> Map Operator Tree: subq1:x + TableScan + alias: x Select Operator expressions: expr: key @@ -282,7 +286,7 @@ Output: default/dest_j1 query: select * from dest_j1 x order by x.key Input: default/dest_j1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1159367121/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1649562514/10000 128 1 3 146 1 2 150 1 1 Index: ql/src/test/results/clientpositive/join_map_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/join_map_ppr.q.out (revision 0) +++ ql/src/test/results/clientpositive/join_map_ppr.q.out (revision 0) @@ -0,0 +1,466 @@ +query: CREATE TABLE dest_j1(key STRING, value STRING, val2 STRING) STORED AS TEXTFILE +query: EXPLAIN EXTENDED +INSERT OVERWRITE TABLE dest_j1 +SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value +FROM src1 x JOIN src y ON (x.key = y.key) +JOIN srcpart z ON (x.key = z.key) +WHERE z.ds='2008-04-08' and z.hr=11 +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_TABREF src1 x) (TOK_TABREF src y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key))) (TOK_TABREF srcpart z) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL z) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest_j1)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST x y))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL z) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL y) value))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL z) ds) '2008-04-08') (= (. 
(TOK_TABLE_OR_COL z) hr) 11))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-4 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-4 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + z + TableScan + alias: z + Filter Operator + isSamplingPred: false + predicate: + expr: ((ds = '2008-04-08') and (UDFToDouble(hr) = UDFToDouble(11))) + type: boolean + Common Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 0 to 2 + condition expressions: + 0 {key} + 1 {value} + 2 {value} {ds} {hr} + keys: + 0 + 1 + 2 + outputColumnNames: _col0, _col3, _col5, _col6, _col7 + Position of Big Table: 2 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string + expr: _col5 + type: string + expr: _col6 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col3, _col5, _col6, _col7 + Filter Operator + isSamplingPred: false + predicate: + expr: ((_col6 = '2008-04-08') and (UDFToDouble(_col7) = UDFToDouble(11))) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col5 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1816268167/10002 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name dest_j1 + columns.types string:string:string + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + columns key,value,val2 + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: dest_j1 + Local Work: + Map Reduce Local Work + Alias -> Map Local Tables: + y + Fetch Operator + limit: -1 + x + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + y + TableScan + alias: y + Common Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 0 to 2 + condition expressions: + 0 {key} + 1 {value} + 2 {value} {ds} {hr} + keys: + 0 + 1 + 2 + outputColumnNames: _col0, _col3, _col5, _col6, _col7 + Position of Big Table: 2 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string + expr: _col5 + type: string + expr: _col6 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col3, _col5, _col6, _col7 + Filter Operator + isSamplingPred: false + predicate: + expr: ((_col6 = '2008-04-08') and (UDFToDouble(_col7) = UDFToDouble(11))) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col5 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1816268167/10002 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name dest_j1 + columns.types string:string:string + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + columns key,value,val2 
+ bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: dest_j1 + x + TableScan + alias: x + Common Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 0 to 2 + condition expressions: + 0 {key} + 1 {value} + 2 {value} {ds} {hr} + keys: + 0 + 1 + 2 + outputColumnNames: _col0, _col3, _col5, _col6, _col7 + Position of Big Table: 2 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string + expr: _col5 + type: string + expr: _col6 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col3, _col5, _col6, _col7 + Filter Operator + isSamplingPred: false + predicate: + expr: ((_col6 = '2008-04-08') and (UDFToDouble(_col7) = UDFToDouble(11))) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col5 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1816268167/10002 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name dest_j1 + columns.types string:string:string + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + columns key,value,val2 + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: dest_j1 + Needs Tagging: false + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + + Stage: Stage-4 + Conditional Operator + list of dependent Tasks: + Move Operator + files: + hdfs directory: true + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1816268167/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2009479673/10000 + Map Reduce + Alias -> Map Operator Tree: + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1816268167/10002 + Reduce Output Operator + sort order: + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 + value expressions: + expr: key + type: string + expr: value + type: string + expr: val2 + type: string + Needs Tagging: false + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1816268167/10002 + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1816268167/10002 + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name dest_j1 + columns.types string:string:string + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + columns key,value,val2 + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: dest_j1 + Reduce Operator Tree: + Extract + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2009479673/10000 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name dest_j1 + columns.types string:string:string + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + columns key,value,val2 + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: dest_j1 + + Stage: Stage-0 + Move Operator + tables: + replace: true + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2009479673/10000 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name dest_j1 + columns.types string:string:string + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + columns key,value,val2 + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: dest_j1 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2009479673/10001 + + +query: INSERT OVERWRITE TABLE dest_j1 +SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value +FROM src1 x JOIN src y ON (x.key = y.key) +JOIN srcpart z ON (x.key = z.key) +WHERE z.ds='2008-04-08' and z.hr=11 +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/src +Input: default/src1 +Output: default/dest_j1 +query: select * from dest_j1 x order by x.key +Input: default/dest_j1 +Output: 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/494281575/10000 +128 val_128 val_128 +128 val_128 val_128 +128 val_128 val_128 +128 val_128 val_128 +128 val_128 val_128 +128 val_128 val_128 +128 val_128 val_128 +128 val_128 val_128 +128 val_128 val_128 +146 val_146 val_146 +146 val_146 val_146 +146 val_146 val_146 +146 val_146 val_146 +150 val_150 val_150 +213 val_213 val_213 +213 val_213 val_213 +213 val_213 val_213 +213 val_213 val_213 +224 val_224 val_224 +224 val_224 val_224 +224 val_224 val_224 +224 val_224 val_224 +238 val_238 val_238 +238 val_238 val_238 +238 val_238 val_238 +238 val_238 val_238 +255 val_255 val_255 +255 val_255 val_255 +255 val_255 val_255 +255 val_255 val_255 +273 val_273 val_273 +273 val_273 val_273 +273 val_273 val_273 +273 val_273 val_273 +273 val_273 val_273 +273 val_273 val_273 +273 val_273 val_273 +273 val_273 val_273 +273 val_273 val_273 +278 val_278 val_278 +278 val_278 val_278 +278 val_278 val_278 +278 val_278 val_278 +311 val_311 val_311 +311 val_311 val_311 +311 val_311 val_311 +311 val_311 val_311 +311 val_311 val_311 +311 val_311 val_311 +311 val_311 val_311 +311 val_311 val_311 +311 val_311 val_311 +369 val_369 val_369 +369 val_369 val_369 +369 val_369 val_369 +369 val_369 val_369 +369 val_369 val_369 +369 val_369 val_369 +369 val_369 val_369 +369 val_369 val_369 +369 val_369 val_369 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +401 val_401 val_401 +406 val_406 val_406 +406 val_406 val_406 +406 val_406 val_406 +406 val_406 val_406 +406 val_406 val_406 +406 val_406 val_406 +406 val_406 val_406 +406 val_406 val_406 +406 val_406 val_406 +406 val_406 val_406 +406 val_406 val_406 +406 val_406 val_406 +406 val_406 val_406 +406 val_406 val_406 +406 val_406 val_406 +406 val_406 val_406 +66 val_66 val_66 +98 val_98 val_98 +98 val_98 val_98 +98 val_98 val_98 +98 val_98 val_98 +query: drop table dest_j1 Index: ql/src/test/results/clientpositive/notable_alias1.q.out =================================================================== --- ql/src/test/results/clientpositive/notable_alias1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/notable_alias1.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(100)) @@ -100,7 +102,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/626489776/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1559429496/10000 1234 0 3.0 1234 10 1.0 1234 11 1.0 Index: ql/src/test/results/clientpositive/transform_ppr2.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr2.q.out (revision 0) +++ ql/src/test/results/clientpositive/transform_ppr2.q.out (revision 0) @@ -0,0 +1,325 @@ +query: EXPLAIN EXTENDED +FROM ( + FROM srcpart src + SELECT TRANSFORM(src.ds, src.key, src.value) + USING '/bin/cat' AS (ds, tkey, tvalue) + WHERE src.ds = 
'2008-04-08' + CLUSTER BY tkey +) tmap +SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) ds) (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST ds tkey tvalue)))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL src) ds) '2008-04-08')) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + tmap:src + TableScan + alias: src + Filter Operator + isSamplingPred: false + predicate: + expr: (ds = '2008-04-08') + type: boolean + Filter Operator + isSamplingPred: false + predicate: + expr: (ds = '2008-04-08') + type: boolean + Select Operator + expressions: + expr: ds + type: string + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1, _col2 + Transform Operator + command: /bin/cat + output info: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns ds,tkey,tvalue + serialization.format 9 + Reduce Output Operator + key expressions: + expr: tkey + type: string + sort order: + + Map-reduce partition columns: + expr: tkey + type: string + tag: -1 + value expressions: + expr: ds + type: string + expr: tkey + type: string + expr: tvalue + type: string + Needs Tagging: false + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + Reduce Operator Tree: + Extract + Filter Operator + isSamplingPred: false + predicate: + expr: (UDFToDouble(_col1) < UDFToDouble(100)) + type: boolean + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/76281111/10001 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + serialization.format 1 + columns.types string:string + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +query: FROM ( + FROM srcpart src + SELECT TRANSFORM(src.ds, src.key, src.value) + USING '/bin/cat' AS (ds, tkey, tvalue) + WHERE src.ds = '2008-04-08' + CLUSTER BY tkey +) tmap +SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/srcpart/ds=2008-04-08/hr=12 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/908564942/10000 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +10 val_10 +10 val_10 +11 val_11 +11 val_11 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +17 val_17 +17 val_17 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +19 val_19 +19 val_19 +2 val_2 +2 val_2 +20 val_20 +20 val_20 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +27 val_27 +27 val_27 +28 val_28 +28 val_28 +30 val_30 +30 val_30 +33 val_33 +33 val_33 +34 val_34 +34 val_34 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +4 val_4 +4 val_4 +41 val_41 +41 val_41 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +43 val_43 +43 val_43 +44 val_44 +44 val_44 +47 val_47 +47 val_47 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +51 val_51 +51 val_51 +51 val_51 +51 val_51 +53 val_53 +53 val_53 +54 val_54 +54 val_54 +57 val_57 +57 val_57 +58 val_58 +58 val_58 +58 val_58 +58 val_58 +64 val_64 +64 val_64 +65 val_65 +65 val_65 +66 val_66 +66 val_66 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +69 val_69 +69 val_69 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +72 val_72 +72 val_72 +72 val_72 +72 val_72 +74 val_74 +74 val_74 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +77 val_77 +77 val_77 +78 val_78 +78 val_78 +8 val_8 +8 val_8 +80 val_80 +80 val_80 +82 val_82 +82 val_82 +83 val_83 +83 val_83 +83 val_83 +83 val_83 +84 val_84 +84 val_84 +84 val_84 +84 val_84 +85 val_85 +85 val_85 +86 val_86 +86 val_86 +87 val_87 +87 val_87 +9 val_9 +9 val_9 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +92 val_92 +92 val_92 +95 val_95 +95 val_95 +95 val_95 +95 val_95 +96 val_96 +96 val_96 +97 val_97 +97 val_97 +97 val_97 +97 val_97 +98 val_98 +98 val_98 +98 val_98 +98 val_98 Index: ql/src/test/results/clientpositive/join1.q.out =================================================================== --- ql/src/test/results/clientpositive/join1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join1.q.out (working copy) @@ -14,6 
+14,8 @@ Map Reduce Alias -> Map Operator Tree: src2 + TableScan + alias: src2 Reduce Output Operator key expressions: expr: key @@ -27,6 +29,8 @@ expr: value type: string src1 + TableScan + alias: src1 Reduce Output Operator key expressions: expr: key @@ -87,7 +91,7 @@ Output: default/dest_j1 query: SELECT dest_j1.* FROM dest_j1 Input: default/dest_j1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/1786745892/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/862388733/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/ppd_multi_insert.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_multi_insert.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_multi_insert.q.out (working copy) @@ -25,6 +25,8 @@ Map Reduce Alias -> Map Operator Tree: b + TableScan + alias: b Reduce Output Operator key expressions: expr: key @@ -35,6 +37,8 @@ type: string tag: 1 a + TableScan + alias: a Reduce Output Operator key expressions: expr: key @@ -199,7 +203,7 @@ Output: ../build/ql/test/data/warehouse/mi4.out query: SELECT mi1.* FROM mi1 Input: default/mi1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/613202794/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1662084677/10000 0 val_0 0 val_0 0 val_0 @@ -350,7 +354,7 @@ 98 val_98 query: SELECT mi2.* FROM mi2 Input: default/mi2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/574876580/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1663619015/10000 100 val_100 100 val_100 100 val_100 @@ -564,7 +568,7 @@ 199 val_199 query: SELECT mi3.* FROM mi3 Input: default/mi3/ds=2008-04-08/hr=12 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1848936782/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/660410600/10000 200 2008-04-08 12 200 2008-04-08 12 200 2008-04-08 12 Index: ql/src/test/results/clientpositive/input6.q.out =================================================================== --- ql/src/test/results/clientpositive/input6.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input6.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src1 + TableScan + alias: src1 Filter Operator predicate: expr: key is null @@ -45,10 +47,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/514038565/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2028160907/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1369456847/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/886482304/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -88,4 +90,4 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/439706537/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2074725714/10000 Index: ql/src/test/results/clientpositive/join9.q.out =================================================================== --- ql/src/test/results/clientpositive/join9.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join9.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce 
Alias -> Map Operator Tree: src2 + TableScan + alias: src2 Reduce Output Operator key expressions: expr: key @@ -27,7 +29,10 @@ expr: value type: string src1 + TableScan + alias: src1 Filter Operator + isSamplingPred: false predicate: expr: ((ds = '2008-04-08') and (hr = '12')) type: boolean @@ -49,10 +54,10 @@ type: string Needs Tagging: true Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -67,10 +72,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition partition values: ds 2008-04-08 @@ -89,7 +94,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart Reduce Operator Tree: @@ -101,6 +106,7 @@ 1 {VALUE._col1} outputColumnNames: _col0, _col2, _col3, _col5 Filter Operator + isSamplingPred: false predicate: expr: ((_col2 = '2008-04-08') and (_col3 = '12')) type: boolean @@ -121,7 +127,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1105796087/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/863266158/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -135,7 +141,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 @@ -143,7 +149,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1105796087/10000 
+ source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/863266158/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -157,10 +163,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1105796087/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/863266158/10001 query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) @@ -170,7 +176,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/231667513/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/367697399/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/udf_split.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_split.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_split.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: split('a b c', ' ') @@ -47,5 +49,5 @@ split(50401020, 0) FROM src LIMIT 1 Input: default/src -Output: file:/data/users/njain/hive_commit1/hive_commit1/.ptest_1/build/ql/tmp/136874203/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2093678093/10000 ["a","b","c"] ["one","two","three"] [] ["5","4","1","2"] Index: ql/src/test/results/clientpositive/rand_partitionpruner2.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (working copy) @@ -17,7 +17,10 @@ Map Reduce Alias -> Map Operator Tree: a + TableScan + alias: a Filter Operator + isSamplingPred: false predicate: expr: ((rand(UDFToLong(1)) < 0.1) and (ds = '2008-04-08')) type: boolean @@ -35,7 +38,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1095666226/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/297745997/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -49,15 +52,15 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/tmptable + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/tmptable serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable Needs Tagging: false Path -> Alias: - 
file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition partition values: ds 2008-04-08 @@ -76,10 +79,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition partition values: ds 2008-04-08 @@ -98,7 +101,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart @@ -108,11 +111,11 @@ Move Operator files: hdfs directory: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1095666226/10002 - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/329704235/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/297745997/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1082152634/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1095666226/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/297745997/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -130,9 +133,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1095666226/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/297745997/10002 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1095666226/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/297745997/10002 Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -147,7 +150,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/tmptable 
+ location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/tmptable serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable Reduce Operator Tree: @@ -155,7 +158,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/329704235/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1082152634/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -168,7 +171,7 @@ bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/tmptable + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/tmptable file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable @@ -177,7 +180,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/329704235/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1082152634/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -191,10 +194,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/tmptable + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/tmptable serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/329704235/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1082152634/10001 query: insert overwrite table tmptable @@ -204,7 +207,7 @@ Output: default/tmptable query: select * from tmptable x sort by x.key,x.value,x.ds,x.hr Input: default/tmptable -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/960645476/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1193259672/10000 103 val_103 2008-04-08 11 103 val_103 2008-04-08 12 133 val_133 2008-04-08 11 Index: ql/src/test/results/clientpositive/union14.q.out =================================================================== --- ql/src/test/results/clientpositive/union14.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union14.q.out (working copy) @@ -20,6 +20,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unionsrc-subquery1:s2 + TableScan + alias: s2 Select Operator expressions: expr: key @@ -37,7 +39,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/142260489/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1650267347/10002 Union Select Operator expressions: @@ -64,7 +66,7 @@ value expressions: expr: _col1 type: bigint - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/142260489/10003 + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1650267347/10003 Union Select Operator expressions: @@ -118,6 +120,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:unionsrc-subquery2:s1 + TableScan + alias: s1 Select Operator Group By Operator aggregations: @@ -161,7 +165,7 @@ unionsrc group by unionsrc.key Input: default/src1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/239114324/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/368749311/10000 10 128 1 146 1 Index: ql/src/test/results/clientpositive/binarysortable_1.q.out =================================================================== --- ql/src/test/results/clientpositive/binarysortable_1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/binarysortable_1.q.out (working copy) @@ -22,6 +22,8 @@ Map Reduce Alias -> Map Operator Tree: a:mytable + TableScan + alias: mytable Select Operator expressions: expr: key @@ -91,7 +93,7 @@ GROUP BY key ) a Input: default/mytable -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1922354825/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2078565705/10000 ^@^@^@ 7.0 ^@^A^@ 9.0 ^@test^@ 2.0 Index: ql/src/test/results/clientpositive/groupby4_noskew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby4_noskew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby4_noskew.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -66,7 +68,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1738488269/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/153160326/10000 0 1 2 Index: ql/src/test/results/clientpositive/join30.q.out =================================================================== --- ql/src/test/results/clientpositive/join30.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join30.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: y + TableScan + alias: y Common Join Operator condition map: Inner Join 0 to 1 @@ -41,6 +43,8 @@ limit: -1 Alias -> Map Local Operator Tree: x + TableScan + alias: x Common Join Operator condition map: Inner Join 0 to 1 @@ -62,7 +66,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/190127938/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2032153196/10002 Select Operator expressions: expr: _col0 @@ -143,7 +147,7 @@ Output: default/dest_j1 query: select * from dest_j1 x order by x.key Input: default/dest_j1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1623598588/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/62701175/10000 66 1 98 2 128 3 Index: ql/src/test/results/clientpositive/input12.q.out =================================================================== --- ql/src/test/results/clientpositive/input12.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input12.q.out (working copy) @@ -23,6 +23,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(100)) @@ -104,10 +106,10 @@ Move 
Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/170032054/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1924563025/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/151034280/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/467218372/10006 Reduce Output Operator sort order: Map-reduce partition columns: @@ -146,10 +148,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/170032054/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1924563025/10002 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/151034280/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/467218372/10007 Reduce Output Operator sort order: Map-reduce partition columns: @@ -188,10 +190,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/170032054/10004 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1924563025/10004 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/151034280/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/467218372/10008 Reduce Output Operator sort order: Map-reduce partition columns: @@ -236,7 +238,7 @@ Output: default/dest3/ds=2008-04-08/hr=12 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1444798074/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1597762873/10000 86 val_86 27 val_27 98 val_98 @@ -323,7 +325,7 @@ 97 val_97 query: SELECT dest2.* FROM dest2 Input: default/dest2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1209928310/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/106250860/10000 165 val_165 193 val_193 150 val_150 @@ -431,7 +433,7 @@ 169 val_169 query: SELECT dest3.* FROM dest3 Input: default/dest3/ds=2008-04-08/hr=12 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1215159045/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/852643259/10000 238 2008-04-08 12 311 2008-04-08 12 409 2008-04-08 12 Index: ql/src/test/results/clientpositive/udf4.q.out =================================================================== --- ql/src/test/results/clientpositive/udf4.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf4.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: dest1 + TableScan + alias: dest1 Select Operator expressions: expr: round(1.0) @@ -71,5 +73,5 @@ query: SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), sqrt(1.0), sqrt(-1.0), sqrt(0.0), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1 Input: default/dest1 -Output: file:/data/users/emil/hive1/hive1/build/ql/tmp/721635558/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1127512298/10000 1 2 -2 1 1 -2 1.0 NULL 0.0 1 2 -1 1 0.731057369148862 3 -3 3 -1 -2 Index: ql/src/test/results/clientpositive/join15.q.out =================================================================== --- 
ql/src/test/results/clientpositive/join15.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join15.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src2 + TableScan + alias: src2 Reduce Output Operator key expressions: expr: key @@ -28,6 +30,8 @@ expr: value type: string src1 + TableScan + alias: src1 Reduce Output Operator key expressions: expr: key @@ -71,7 +75,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1355940313/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/129760199/10002 Reduce Output Operator key expressions: expr: _col0 @@ -109,7 +113,7 @@ query: SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key) SORT BY src1.key, src1.value, src2.key, src2.value Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/839547725/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/340744223/10000 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/udf_size.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_size.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_size.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: src_thrift + TableScan + alias: src_thrift Filter Operator predicate: expr: (lint is not null and not mstringstring is null) @@ -58,5 +60,5 @@ WHERE src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL) LIMIT 1 Input: default/src_thrift -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/8998980/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/151376732/10000 3 1 1 -1 Index: ql/src/test/results/clientpositive/union_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/union_ppr.q.out (revision 0) +++ ql/src/test/results/clientpositive/union_ppr.q.out (revision 0) @@ -0,0 +1,545 @@ +query: EXPLAIN EXTENDED +SELECT * FROM ( + SELECT X.* FROM SRCPART X WHERE X.key < 100 + UNION ALL + SELECT Y.* FROM SRCPART Y WHERE Y.key < 100 +) A +WHERE A.ds = '2008-04-08' +SORT BY A.key +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART X)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF X))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL X) key) 100)))) (TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART Y)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF Y))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL Y) key) 100))))) A)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL A) ds) '2008-04-08')) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (. 
(TOK_TABLE_OR_COL A) key))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + null-subquery1:a-subquery1:x + TableScan + alias: x + Filter Operator + isSamplingPred: false + predicate: + expr: ((UDFToDouble(key) < UDFToDouble(100)) and (ds = '2008-04-08')) + type: boolean + Filter Operator + isSamplingPred: false + predicate: + expr: (UDFToDouble(key) < UDFToDouble(100)) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Union + Filter Operator + isSamplingPred: false + predicate: + expr: (_col2 = '2008-04-08') + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + null-subquery2:a-subquery2:y + TableScan + alias: y + Filter Operator + isSamplingPred: false + predicate: + expr: ((UDFToDouble(key) < UDFToDouble(100)) and (ds = '2008-04-08')) + type: boolean + Filter Operator + isSamplingPred: false + predicate: + expr: (UDFToDouble(key) < UDFToDouble(100)) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Union + Filter Operator + isSamplingPred: false + predicate: + expr: (_col2 = '2008-04-08') + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + Needs Tagging: false + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + 
Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + Reduce Operator Tree: + Extract + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/572148807/10001 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + serialization.format 1 + columns.types string:string:string:string + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +query: SELECT * FROM ( + SELECT X.* FROM SRCPART X WHERE X.key < 100 + UNION ALL + SELECT Y.* FROM SRCPART Y WHERE Y.key < 100 +) A +WHERE A.ds = '2008-04-08' +SORT BY A.key +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/srcpart/ds=2008-04-08/hr=12 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/548110584/10000 +0 val_0 2008-04-08 11 +0 val_0 2008-04-08 11 +0 val_0 2008-04-08 11 +0 val_0 2008-04-08 11 +0 val_0 2008-04-08 11 +0 val_0 2008-04-08 11 +0 val_0 2008-04-08 12 +0 val_0 2008-04-08 12 +0 val_0 2008-04-08 12 +0 val_0 2008-04-08 12 +0 val_0 2008-04-08 12 +0 val_0 2008-04-08 12 +10 val_10 2008-04-08 12 +10 val_10 2008-04-08 12 +10 val_10 2008-04-08 11 +10 val_10 2008-04-08 11 +11 val_11 2008-04-08 11 +11 val_11 2008-04-08 11 +11 val_11 2008-04-08 12 +11 val_11 2008-04-08 12 +12 val_12 2008-04-08 12 +12 val_12 2008-04-08 12 +12 val_12 2008-04-08 12 +12 val_12 2008-04-08 12 +12 val_12 2008-04-08 11 +12 val_12 2008-04-08 11 +12 val_12 2008-04-08 11 +12 val_12 2008-04-08 11 +15 val_15 2008-04-08 11 +15 val_15 2008-04-08 11 +15 val_15 2008-04-08 11 +15 val_15 2008-04-08 11 +15 val_15 2008-04-08 12 +15 val_15 2008-04-08 12 +15 val_15 2008-04-08 12 +15 val_15 2008-04-08 12 +17 val_17 2008-04-08 12 +17 val_17 2008-04-08 12 +17 val_17 2008-04-08 11 +17 val_17 2008-04-08 11 +18 val_18 2008-04-08 11 +18 val_18 2008-04-08 11 +18 val_18 2008-04-08 11 +18 val_18 2008-04-08 11 +18 val_18 2008-04-08 12 +18 val_18 2008-04-08 12 +18 val_18 2008-04-08 12 +18 val_18 2008-04-08 12 +19 val_19 2008-04-08 12 +19 val_19 2008-04-08 12 +19 val_19 2008-04-08 11 +19 val_19 2008-04-08 11 +2 val_2 2008-04-08 11 +2 val_2 2008-04-08 11 +2 val_2 2008-04-08 12 +2 val_2 2008-04-08 12 +20 val_20 2008-04-08 12 +20 val_20 2008-04-08 12 +20 val_20 2008-04-08 11 +20 val_20 2008-04-08 11 +24 val_24 2008-04-08 11 +24 val_24 2008-04-08 11 +24 val_24 2008-04-08 11 +24 val_24 2008-04-08 11 +24 val_24 2008-04-08 12 +24 val_24 2008-04-08 12 +24 val_24 2008-04-08 12 +24 val_24 2008-04-08 12 +26 val_26 2008-04-08 12 +26 val_26 2008-04-08 12 +26 val_26 2008-04-08 12 +26 val_26 2008-04-08 12 +26 val_26 2008-04-08 11 +26 val_26 2008-04-08 11 +26 val_26 2008-04-08 11 +26 val_26 2008-04-08 11 +27 val_27 2008-04-08 11 +27 val_27 2008-04-08 11 +27 
val_27 2008-04-08 12 +27 val_27 2008-04-08 12 +28 val_28 2008-04-08 12 +28 val_28 2008-04-08 12 +28 val_28 2008-04-08 11 +28 val_28 2008-04-08 11 +30 val_30 2008-04-08 11 +30 val_30 2008-04-08 11 +30 val_30 2008-04-08 12 +30 val_30 2008-04-08 12 +33 val_33 2008-04-08 12 +33 val_33 2008-04-08 12 +33 val_33 2008-04-08 11 +33 val_33 2008-04-08 11 +34 val_34 2008-04-08 11 +34 val_34 2008-04-08 11 +34 val_34 2008-04-08 12 +34 val_34 2008-04-08 12 +35 val_35 2008-04-08 12 +35 val_35 2008-04-08 12 +35 val_35 2008-04-08 12 +35 val_35 2008-04-08 12 +35 val_35 2008-04-08 12 +35 val_35 2008-04-08 12 +35 val_35 2008-04-08 11 +35 val_35 2008-04-08 11 +35 val_35 2008-04-08 11 +35 val_35 2008-04-08 11 +35 val_35 2008-04-08 11 +35 val_35 2008-04-08 11 +37 val_37 2008-04-08 11 +37 val_37 2008-04-08 11 +37 val_37 2008-04-08 11 +37 val_37 2008-04-08 11 +37 val_37 2008-04-08 12 +37 val_37 2008-04-08 12 +37 val_37 2008-04-08 12 +37 val_37 2008-04-08 12 +4 val_4 2008-04-08 12 +4 val_4 2008-04-08 12 +4 val_4 2008-04-08 11 +4 val_4 2008-04-08 11 +41 val_41 2008-04-08 11 +41 val_41 2008-04-08 11 +41 val_41 2008-04-08 12 +41 val_41 2008-04-08 12 +42 val_42 2008-04-08 12 +42 val_42 2008-04-08 12 +42 val_42 2008-04-08 12 +42 val_42 2008-04-08 12 +42 val_42 2008-04-08 11 +42 val_42 2008-04-08 11 +42 val_42 2008-04-08 11 +42 val_42 2008-04-08 11 +43 val_43 2008-04-08 11 +43 val_43 2008-04-08 11 +43 val_43 2008-04-08 12 +43 val_43 2008-04-08 12 +44 val_44 2008-04-08 12 +44 val_44 2008-04-08 12 +44 val_44 2008-04-08 11 +44 val_44 2008-04-08 11 +47 val_47 2008-04-08 11 +47 val_47 2008-04-08 11 +47 val_47 2008-04-08 12 +47 val_47 2008-04-08 12 +5 val_5 2008-04-08 12 +5 val_5 2008-04-08 12 +5 val_5 2008-04-08 12 +5 val_5 2008-04-08 12 +5 val_5 2008-04-08 12 +5 val_5 2008-04-08 12 +5 val_5 2008-04-08 11 +5 val_5 2008-04-08 11 +5 val_5 2008-04-08 11 +5 val_5 2008-04-08 11 +5 val_5 2008-04-08 11 +5 val_5 2008-04-08 11 +51 val_51 2008-04-08 11 +51 val_51 2008-04-08 11 +51 val_51 2008-04-08 11 +51 val_51 2008-04-08 11 +51 val_51 2008-04-08 12 +51 val_51 2008-04-08 12 +51 val_51 2008-04-08 12 +51 val_51 2008-04-08 12 +53 val_53 2008-04-08 12 +53 val_53 2008-04-08 12 +53 val_53 2008-04-08 11 +53 val_53 2008-04-08 11 +54 val_54 2008-04-08 11 +54 val_54 2008-04-08 11 +54 val_54 2008-04-08 12 +54 val_54 2008-04-08 12 +57 val_57 2008-04-08 12 +57 val_57 2008-04-08 12 +57 val_57 2008-04-08 11 +57 val_57 2008-04-08 11 +58 val_58 2008-04-08 11 +58 val_58 2008-04-08 11 +58 val_58 2008-04-08 11 +58 val_58 2008-04-08 11 +58 val_58 2008-04-08 12 +58 val_58 2008-04-08 12 +58 val_58 2008-04-08 12 +58 val_58 2008-04-08 12 +64 val_64 2008-04-08 12 +64 val_64 2008-04-08 12 +64 val_64 2008-04-08 11 +64 val_64 2008-04-08 11 +65 val_65 2008-04-08 11 +65 val_65 2008-04-08 11 +65 val_65 2008-04-08 12 +65 val_65 2008-04-08 12 +66 val_66 2008-04-08 12 +66 val_66 2008-04-08 12 +66 val_66 2008-04-08 11 +66 val_66 2008-04-08 11 +67 val_67 2008-04-08 11 +67 val_67 2008-04-08 11 +67 val_67 2008-04-08 11 +67 val_67 2008-04-08 11 +67 val_67 2008-04-08 12 +67 val_67 2008-04-08 12 +67 val_67 2008-04-08 12 +67 val_67 2008-04-08 12 +69 val_69 2008-04-08 12 +69 val_69 2008-04-08 12 +69 val_69 2008-04-08 11 +69 val_69 2008-04-08 11 +70 val_70 2008-04-08 11 +70 val_70 2008-04-08 11 +70 val_70 2008-04-08 11 +70 val_70 2008-04-08 11 +70 val_70 2008-04-08 11 +70 val_70 2008-04-08 11 +70 val_70 2008-04-08 12 +70 val_70 2008-04-08 12 +70 val_70 2008-04-08 12 +70 val_70 2008-04-08 12 +70 val_70 2008-04-08 12 +70 val_70 2008-04-08 12 +72 val_72 2008-04-08 12 +72 val_72 
2008-04-08 12 +72 val_72 2008-04-08 12 +72 val_72 2008-04-08 12 +72 val_72 2008-04-08 11 +72 val_72 2008-04-08 11 +72 val_72 2008-04-08 11 +72 val_72 2008-04-08 11 +74 val_74 2008-04-08 11 +74 val_74 2008-04-08 11 +74 val_74 2008-04-08 12 +74 val_74 2008-04-08 12 +76 val_76 2008-04-08 12 +76 val_76 2008-04-08 12 +76 val_76 2008-04-08 12 +76 val_76 2008-04-08 12 +76 val_76 2008-04-08 11 +76 val_76 2008-04-08 11 +76 val_76 2008-04-08 11 +76 val_76 2008-04-08 11 +77 val_77 2008-04-08 11 +77 val_77 2008-04-08 11 +77 val_77 2008-04-08 12 +77 val_77 2008-04-08 12 +78 val_78 2008-04-08 12 +78 val_78 2008-04-08 12 +78 val_78 2008-04-08 11 +78 val_78 2008-04-08 11 +8 val_8 2008-04-08 11 +8 val_8 2008-04-08 11 +8 val_8 2008-04-08 12 +8 val_8 2008-04-08 12 +80 val_80 2008-04-08 12 +80 val_80 2008-04-08 12 +80 val_80 2008-04-08 11 +80 val_80 2008-04-08 11 +82 val_82 2008-04-08 11 +82 val_82 2008-04-08 11 +82 val_82 2008-04-08 12 +82 val_82 2008-04-08 12 +83 val_83 2008-04-08 12 +83 val_83 2008-04-08 12 +83 val_83 2008-04-08 12 +83 val_83 2008-04-08 12 +83 val_83 2008-04-08 11 +83 val_83 2008-04-08 11 +83 val_83 2008-04-08 11 +83 val_83 2008-04-08 11 +84 val_84 2008-04-08 11 +84 val_84 2008-04-08 11 +84 val_84 2008-04-08 11 +84 val_84 2008-04-08 11 +84 val_84 2008-04-08 12 +84 val_84 2008-04-08 12 +84 val_84 2008-04-08 12 +84 val_84 2008-04-08 12 +85 val_85 2008-04-08 12 +85 val_85 2008-04-08 12 +85 val_85 2008-04-08 11 +85 val_85 2008-04-08 11 +86 val_86 2008-04-08 11 +86 val_86 2008-04-08 11 +86 val_86 2008-04-08 12 +86 val_86 2008-04-08 12 +87 val_87 2008-04-08 12 +87 val_87 2008-04-08 12 +87 val_87 2008-04-08 11 +87 val_87 2008-04-08 11 +9 val_9 2008-04-08 11 +9 val_9 2008-04-08 11 +9 val_9 2008-04-08 12 +9 val_9 2008-04-08 12 +90 val_90 2008-04-08 12 +90 val_90 2008-04-08 12 +90 val_90 2008-04-08 12 +90 val_90 2008-04-08 12 +90 val_90 2008-04-08 12 +90 val_90 2008-04-08 12 +90 val_90 2008-04-08 11 +90 val_90 2008-04-08 11 +90 val_90 2008-04-08 11 +90 val_90 2008-04-08 11 +90 val_90 2008-04-08 11 +90 val_90 2008-04-08 11 +92 val_92 2008-04-08 11 +92 val_92 2008-04-08 11 +92 val_92 2008-04-08 12 +92 val_92 2008-04-08 12 +95 val_95 2008-04-08 12 +95 val_95 2008-04-08 12 +95 val_95 2008-04-08 12 +95 val_95 2008-04-08 12 +95 val_95 2008-04-08 11 +95 val_95 2008-04-08 11 +95 val_95 2008-04-08 11 +95 val_95 2008-04-08 11 +96 val_96 2008-04-08 11 +96 val_96 2008-04-08 11 +96 val_96 2008-04-08 12 +96 val_96 2008-04-08 12 +97 val_97 2008-04-08 12 +97 val_97 2008-04-08 12 +97 val_97 2008-04-08 12 +97 val_97 2008-04-08 12 +97 val_97 2008-04-08 11 +97 val_97 2008-04-08 11 +97 val_97 2008-04-08 11 +97 val_97 2008-04-08 11 +98 val_98 2008-04-08 11 +98 val_98 2008-04-08 11 +98 val_98 2008-04-08 11 +98 val_98 2008-04-08 11 +98 val_98 2008-04-08 12 +98 val_98 2008-04-08 12 +98 val_98 2008-04-08 12 +98 val_98 2008-04-08 12 Index: ql/src/test/results/clientpositive/groupby4.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby4.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby4.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -46,7 +48,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/959139456/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1357905866/10002 Reduce Output Operator key expressions: expr: _col0 @@ -94,7 +96,7 @@ 
Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1076286767/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1402842530/10000 0 1 2 Index: ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: true is not null @@ -52,7 +54,7 @@ FROM src WHERE true IS NOT NULL LIMIT 1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/998804066/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2088182739/10000 true true true query: EXPLAIN FROM src_thrift @@ -73,6 +75,8 @@ Map Reduce Alias -> Map Operator Tree: src_thrift + TableScan + alias: src_thrift Filter Operator predicate: expr: (lint is not null and not mstringstring is null) @@ -110,5 +114,5 @@ WHERE src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL) LIMIT 1 Input: default/src_thrift -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1868442746/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1783450420/10000 true true true Index: ql/src/test/results/clientpositive/input_part7.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part7.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input_part7.q.out (working copy) @@ -17,11 +17,15 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:a-subquery1:x + TableScan + alias: x Filter Operator + isSamplingPred: false predicate: expr: ((ds = '2008-04-08') and (UDFToDouble(key) < UDFToDouble(100))) type: boolean Filter Operator + isSamplingPred: false predicate: expr: ((ds = '2008-04-08') and (UDFToDouble(key) < UDFToDouble(100))) type: boolean @@ -64,11 +68,15 @@ expr: _col3 type: string null-subquery2:a-subquery2:y + TableScan + alias: y Filter Operator + isSamplingPred: false predicate: expr: ((ds = '2008-04-08') and (UDFToDouble(key) < UDFToDouble(100))) type: boolean Filter Operator + isSamplingPred: false predicate: expr: ((ds = '2008-04-08') and (UDFToDouble(key) < UDFToDouble(100))) type: boolean @@ -112,10 +120,10 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Path -> Partition: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition partition values: ds 2008-04-08 @@ -134,10 +142,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition partition values: ds 2008-04-08 @@ -156,7 +164,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart Reduce Operator Tree: @@ -164,7 +172,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/526431721/10001 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/260684261/10001 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -186,7 +194,7 @@ SORT BY A.key Input: default/srcpart/ds=2008-04-08/hr=11 Input: default/srcpart/ds=2008-04-08/hr=12 -Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/891014726/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1095258770/10000 0 val_0 2008-04-08 11 0 val_0 2008-04-08 11 0 val_0 2008-04-08 11 Index: ql/src/test/results/clientpositive/union7.q.out =================================================================== --- ql/src/test/results/clientpositive/union7.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union7.q.out (working copy) @@ -19,6 +19,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unionsrc-subquery1:s1 + TableScan + alias: s1 Select Operator Group By Operator aggregations: @@ -54,7 +56,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1149196999/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1947982246/10002 Union Select Operator expressions: @@ -81,7 +83,7 @@ value expressions: expr: _col1 type: bigint - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1149196999/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1947982246/10003 Union Select Operator expressions: @@ -135,6 +137,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:unionsrc-subquery2:s2 + TableScan + alias: s2 Select Operator expressions: expr: key @@ -159,7 +163,7 @@ select s2.key as key, s2.value as value from src1 s2) unionsrc group by unionsrc.key Input: default/src Input: default/src1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/252887954/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/160103856/10000 10 128 1 146 1 Index: ql/src/test/results/clientpositive/input3_limit.q.out =================================================================== --- 
ql/src/test/results/clientpositive/input3_limit.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input3_limit.q.out (working copy) @@ -19,6 +19,8 @@ Map Reduce Alias -> Map Operator Tree: t:t1 + TableScan + alias: t1 Select Operator expressions: expr: key @@ -62,7 +64,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1988545079/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1953025922/10002 Reduce Output Operator sort order: tag: -1 @@ -99,7 +101,7 @@ Output: default/t2 query: SELECT * FROM T2 SORT BY key, value Input: default/t2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1233838218/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/741107383/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/udf_lower.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_lower.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_lower.q.out (working copy) @@ -12,6 +12,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(86)) @@ -41,5 +43,5 @@ query: SELECT lower('AbC 123'), upper('AbC 123') FROM src WHERE key = 86 Input: default/src -Output: file:/data/users/emil/hive1/hive1/build/ql/tmp/1693261444/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1100342629/10000 abc 123 ABC 123 Index: ql/src/test/results/clientpositive/mapreduce2.q.out =================================================================== --- ql/src/test/results/clientpositive/mapreduce2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/mapreduce2.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -92,7 +94,7 @@ Output: default/dest1 query: SELECT * FROM (SELECT dest1.* FROM dest1 DISTRIBUTE BY key SORT BY key, ten, one, value) T Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/2136416254/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1799546978/10000 0 0 0 val_0 0 0 0 val_0 0 0 0 val_0 Index: ql/src/test/results/clientpositive/nullgroup.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup.q.out (revision 801363) +++ ql/src/test/results/clientpositive/nullgroup.q.out (working copy) @@ -12,6 +12,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) > UDFToDouble(9999)) @@ -57,7 +59,7 @@ query: select count(1) from src x where x.key > 9999 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/289311164/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/161603981/10000 0 query: explain select count(1) from src x where x.key > 9999 @@ -73,6 +75,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) > UDFToDouble(9999)) @@ -118,7 +122,7 @@ query: select count(1) from src x where x.key > 9999 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1460273503/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/720088705/10000 0 query: 
explain select count(1) from src x where x.key > 9999 @@ -135,6 +139,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) > UDFToDouble(9999)) @@ -169,7 +175,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/419295241/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1577457021/10002 Reduce Output Operator sort order: tag: -1 @@ -201,7 +207,7 @@ query: select count(1) from src x where x.key > 9999 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1216265581/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1185327154/10000 0 query: explain select count(1) from src x where x.key > 9999 @@ -217,6 +223,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) > UDFToDouble(9999)) @@ -257,5 +265,5 @@ query: select count(1) from src x where x.key > 9999 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1618872238/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/858984614/10000 0 Index: ql/src/test/results/clientpositive/input21.q.out =================================================================== --- ql/src/test/results/clientpositive/input21.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input21.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src_null + TableScan + alias: src_null Select Operator expressions: expr: a @@ -59,7 +61,7 @@ query: SELECT * FROM src_null DISTRIBUTE BY c SORT BY d Input: default/src_null -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/125046373/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/497061078/10000 1.0 1 same 0 1.0 1 same 1 1.0 1 same 2 Index: ql/src/test/results/clientpositive/sample3.q.out =================================================================== --- ql/src/test/results/clientpositive/sample3.q.out (revision 801363) +++ ql/src/test/results/clientpositive/sample3.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: s + TableScan + alias: s Filter Operator predicate: expr: (((hash(key) & 2147483647) % 5) = 0) @@ -42,7 +44,7 @@ query: SELECT s.key FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s SORT BY key Input: default/srcbucket -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1207444582/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/533192725/10000 0 0 0 Index: ql/src/test/results/clientpositive/groupby2_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby2_limit.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby2_limit.q.out (working copy) @@ -12,6 +12,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -70,7 +72,7 @@ query: SELECT src.key, sum(substr(src.value,5)) FROM src GROUP BY src.key LIMIT 5 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/209868765/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1181116822/10000 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/groupby7_map.q.out 
=================================================================== --- ql/src/test/results/clientpositive/groupby7_map.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby7_map.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -111,7 +113,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/279658140/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/694830610/10004 Reduce Output Operator key expressions: expr: _col0 @@ -175,7 +177,7 @@ Output: default/dest2 query: SELECT DEST1.* FROM DEST1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/191972107/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/278066492/10000 0 0.0 10 10.0 100 200.0 @@ -487,7 +489,7 @@ 98 196.0 query: SELECT DEST2.* FROM DEST2 Input: default/dest2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1027947003/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1908409762/10000 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/input_lazyserde.q.out =================================================================== --- ql/src/test/results/clientpositive/input_lazyserde.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input_lazyserde.q.out (working copy) @@ -20,6 +20,8 @@ Map Reduce Alias -> Map Operator Tree: src_thrift + TableScan + alias: src_thrift Select Operator expressions: expr: lint @@ -78,7 +80,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 DISTRIBUTE BY 1 Input: default/dest1 -Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/604433188/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/54447048/10000 [0,0,0] ["0","0","0"] {"key_0":"value_0"} 1712634731 record_0 [1,2,3] ["10","100","1000"] {"key_1":"value_1"} 465985200 record_1 [2,4,6] ["20","200","2000"] {"key_2":"value_2"} -751827638 record_2 @@ -92,7 +94,7 @@ null null null 0 NULL query: SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1 DISTRIBUTE BY 1 Input: default/dest1 -Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/848270212/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1845091515/10000 0 0 NULL 1712634731 record_0 1 10 NULL 465985200 record_1 2 20 NULL -751827638 record_2 @@ -111,7 +113,7 @@ Output: default/dest1 query: SELECT * from dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1928951519/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/482165346/10000 [0,0,0] [1,2,3] [2,4,6] @@ -130,7 +132,7 @@ Output: default/dest1 query: SELECT * from dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1672243759/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/244883137/10000 {"key_0":"value_0"} {"key_1":"value_1"} {"key_2":"value_2"} Index: ql/src/test/results/clientpositive/cluster.q.out =================================================================== --- ql/src/test/results/clientpositive/cluster.q.out (revision 801363) +++ ql/src/test/results/clientpositive/cluster.q.out (working copy) @@ -12,6 +12,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter 
Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(10)) @@ -57,7 +59,7 @@ query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1924758530/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1851665085/10000 10 val_10 query: EXPLAIN SELECT * FROM SRC x where x.key = 20 CLUSTER BY key @@ -73,6 +75,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(20)) @@ -118,7 +122,7 @@ query: SELECT * FROM SRC x where x.key = 20 CLUSTER BY key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1123767134/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/577104330/10000 20 val_20 query: EXPLAIN SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key @@ -134,6 +138,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(20)) @@ -179,7 +185,7 @@ query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/861786563/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/716161226/10000 20 val_20 query: EXPLAIN SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY x.key @@ -195,6 +201,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(20)) @@ -240,7 +248,7 @@ query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY x.key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1270193772/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1938918851/10000 20 val_20 query: EXPLAIN SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key @@ -256,6 +264,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(20)) @@ -301,7 +311,7 @@ query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1442901621/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/111063159/10000 20 val_20 query: EXPLAIN SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key @@ -317,6 +327,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(20)) @@ -362,7 +374,7 @@ query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/153851399/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2088645304/10000 20 val_20 query: EXPLAIN SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY v1 @@ -378,6 +390,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(20)) @@ -423,7 +437,7 @@ query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY v1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/558964860/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/43650880/10000 
20 val_20 query: EXPLAIN SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20 @@ -439,6 +453,8 @@ Map Reduce Alias -> Map Operator Tree: y:x + TableScan + alias: x Select Operator expressions: expr: key @@ -487,7 +503,7 @@ query: SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1403526844/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2050273150/10000 20 val_20 query: EXPLAIN SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1 @@ -504,6 +520,8 @@ Map Reduce Alias -> Map Operator Tree: y + TableScan + alias: y Reduce Output Operator key expressions: expr: key @@ -517,6 +535,8 @@ expr: key type: string x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(20)) @@ -566,7 +586,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/708887411/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/568111874/10002 Reduce Output Operator key expressions: expr: _col1 @@ -599,7 +619,7 @@ query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/102207247/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1907382390/10000 20 val_20 20 query: EXPLAIN SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1 @@ -616,6 +636,8 @@ Map Reduce Alias -> Map Operator Tree: y + TableScan + alias: y Reduce Output Operator key expressions: expr: key @@ -631,6 +653,8 @@ expr: value type: string x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(20)) @@ -682,7 +706,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1638014428/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/36559589/10002 Reduce Output Operator key expressions: expr: _col1 @@ -717,7 +741,7 @@ query: SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1268473862/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/917788585/10000 20 val_20 20 val_20 query: EXPLAIN SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key @@ -734,6 +758,8 @@ Map Reduce Alias -> Map Operator Tree: y + TableScan + alias: y Reduce Output Operator key expressions: expr: key @@ -749,6 +775,8 @@ expr: value type: string x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(20)) @@ -800,7 +828,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/2041580763/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1989928864/10002 Reduce Output Operator key expressions: expr: _col0 @@ -835,7 +863,7 @@ query: SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key Input: default/src -Output: 
file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/781263409/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1491329699/10000 20 val_20 20 val_20 query: EXPLAIN SELECT x.key, x.value as v1, y.key as yk FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key @@ -852,6 +880,8 @@ Map Reduce Alias -> Map Operator Tree: y + TableScan + alias: y Reduce Output Operator key expressions: expr: key @@ -865,6 +895,8 @@ expr: key type: string x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(20)) @@ -914,7 +946,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/919629862/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1994351843/10002 Reduce Output Operator key expressions: expr: _col0 @@ -947,7 +979,7 @@ query: SELECT x.key, x.value as v1, y.key as yk FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1571336063/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1415898959/10000 20 val_20 20 query: EXPLAIN SELECT unioninput.* @@ -969,6 +1001,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unioninput-subquery1:src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(100)) @@ -1007,6 +1041,8 @@ expr: _col1 type: string null-subquery2:unioninput-subquery2:src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) > UDFToDouble(100)) @@ -1057,6 +1093,7 @@ Fetch Operator limit: -1 + query: SELECT unioninput.* FROM ( FROM src select src.key, src.value WHERE src.key < 100 @@ -1065,7 +1102,7 @@ ) unioninput CLUSTER BY unioninput.key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1313522351/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1319156082/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/join4.q.out =================================================================== --- ql/src/test/results/clientpositive/join4.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join4.q.out (working copy) @@ -25,6 +25,8 @@ Map Reduce Alias -> Map Operator Tree: c:a:src1 + TableScan + alias: src1 Filter Operator predicate: expr: ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20))) @@ -55,6 +57,8 @@ expr: _col1 type: string c:b:src2 + TableScan + alias: src2 Filter Operator predicate: expr: ((UDFToDouble(key) > UDFToDouble(15)) and (UDFToDouble(key) < UDFToDouble(25))) @@ -162,7 +166,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/2073874945/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1353187421/10000 11 val_11 NULL NULL 12 val_12 NULL NULL 12 val_12 NULL NULL Index: ql/src/test/results/clientpositive/input_testxpath3.q.out =================================================================== --- ql/src/test/results/clientpositive/input_testxpath3.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input_testxpath3.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src_thrift + TableScan + alias: src_thrift Select Operator expressions: expr: mstringstring['key_9'] @@ -35,7 +37,7 
@@ query: FROM src_thrift SELECT src_thrift.mstringstring['key_9'], src_thrift.lintstring.myint Input: default/src_thrift -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1386485192/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1059185497/10000 NULL [0] NULL [1] NULL [4] Index: ql/src/test/results/clientpositive/input_dynamicserde.q.out =================================================================== --- ql/src/test/results/clientpositive/input_dynamicserde.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input_dynamicserde.q.out (working copy) @@ -21,6 +21,8 @@ Map Reduce Alias -> Map Operator Tree: src_thrift + TableScan + alias: src_thrift Select Operator expressions: expr: lint @@ -49,10 +51,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/517762331/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1925142463/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1867126986/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1140895264/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -98,7 +100,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/683760130/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/842958925/10000 [0,0,0] ["0","0","0"] {"key_0":"value_0"} 1712634731 record_0 [1,2,3] ["10","100","1000"] {"key_1":"value_1"} 465985200 record_1 [2,4,6] ["20","200","2000"] {"key_2":"value_2"} -751827638 record_2 @@ -112,7 +114,7 @@ null null null 0 NULL query: SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/189330657/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1145493551/10000 0 0 NULL 1712634731 record_0 1 10 NULL 465985200 record_1 2 20 NULL -751827638 record_2 Index: ql/src/test/results/clientpositive/input9.q.out =================================================================== --- ql/src/test/results/clientpositive/input9.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input9.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src1 + TableScan + alias: src1 Filter Operator predicate: expr: (null = null) @@ -52,10 +54,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/2053307008/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1618745236/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/694414280/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1286113627/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -95,4 +97,4 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1570451077/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1674136764/10000 Index: ql/src/test/results/clientpositive/union17.q.out =================================================================== --- 
ql/src/test/results/clientpositive/union17.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union17.q.out (working copy) @@ -32,6 +32,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unionsrc-subquery1:s1 + TableScan + alias: s1 Select Operator Group By Operator aggregations: @@ -67,7 +69,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1951605934/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1501890791/10004 Union Reduce Output Operator key expressions: @@ -83,7 +85,7 @@ type: string expr: _col1 type: string - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1951605934/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1501890791/10007 Union Reduce Output Operator key expressions: @@ -135,7 +137,7 @@ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1951605934/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1501890791/10005 Reduce Output Operator key expressions: expr: _col0 @@ -186,7 +188,7 @@ Stage: Stage-5 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1951605934/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1501890791/10006 Reduce Output Operator key expressions: expr: _col0 @@ -246,6 +248,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:unionsrc-subquery2:s2 + TableScan + alias: s2 Select Operator expressions: expr: key @@ -271,7 +275,7 @@ Output: default/dest2 query: SELECT DEST1.* FROM DEST1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1791774870/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/618311700/10000 0 1 10 1 100 1 @@ -584,7 +588,7 @@ tst1 1 query: SELECT DEST2.* FROM DEST2 Input: default/dest2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1398773542/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/390182720/10000 0 val_0 1 10 val_10 1 100 val_100 1 Index: ql/src/test/results/clientpositive/join10.q.out =================================================================== --- ql/src/test/results/clientpositive/join10.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join10.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: x:src + TableScan + alias: src Select Operator expressions: expr: key @@ -31,6 +33,8 @@ type: string tag: 0 y:src + TableScan + alias: src Select Operator expressions: expr: key @@ -86,7 +90,7 @@ ON (x.key = Y.key) SELECT Y.* Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1524530660/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1608637001/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/input30.q.out =================================================================== --- ql/src/test/results/clientpositive/input30.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input30.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (((hash(rand(UDFToLong(460476415))) & 2147483647) % 32) = 0) @@ -75,7 +77,7 @@ Output: default/tst_dest30 query: select * from tst_dest30 Input: default/tst_dest30 -Output: 
file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1981954297/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1316741609/10000 18 query: drop table tst_dest30 query: drop table dest30 Index: ql/src/test/results/clientpositive/join33.q.out =================================================================== --- ql/src/test/results/clientpositive/join33.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join33.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: y + TableScan + alias: y Common Join Operator condition map: Inner Join 0 to 1 @@ -31,7 +33,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/923932186/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/402402296/10002 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -47,6 +49,8 @@ limit: -1 Alias -> Map Local Operator Tree: x + TableScan + alias: x Common Join Operator condition map: Inner Join 0 to 1 @@ -61,7 +65,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/923932186/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/402402296/10002 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -71,9 +75,9 @@ escape.delim \ Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -88,7 +92,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src @@ -96,15 +100,20 @@ Map Reduce Alias -> Map Operator Tree: z + TableScan + alias: z Filter Operator + isSamplingPred: false predicate: expr: ((ds = '2008-04-08') and (UDFToDouble(hr) = UDFToDouble(11))) type: boolean Filter Operator + isSamplingPred: false predicate: expr: (ds = '2008-04-08') type: boolean Filter Operator + isSamplingPred: false predicate: expr: (UDFToDouble(hr) = UDFToDouble(11)) type: boolean @@ -120,7 +129,7 @@ value expressions: expr: value type: string - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/923932186/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/402402296/10002 Select Operator expressions: expr: _col0 @@ -146,10 +155,10 @@ type: string Needs Tagging: true Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/923932186/10002 
+ file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/402402296/10002 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition partition values: ds 2008-04-08 @@ -168,10 +177,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/923932186/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/402402296/10002 Partition input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -200,7 +209,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/682907345/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/190123379/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -214,7 +223,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 @@ -222,7 +231,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/682907345/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/190123379/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -236,10 +245,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/682907345/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/190123379/10001 query: INSERT OVERWRITE TABLE dest_j1 @@ -252,7 +261,7 @@ Output: default/dest_j1 query: select * from dest_j1 x order by x.key Input: default/dest_j1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/921057971/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/798519490/10000 146 val_146 
val_146 146 val_146 val_146 146 val_146 val_146 Index: ql/src/test/results/clientpositive/udf7.q.out =================================================================== --- ql/src/test/results/clientpositive/udf7.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf7.q.out (working copy) @@ -21,6 +21,8 @@ Map Reduce Alias -> Map Operator Tree: dest1 + TableScan + alias: dest1 Select Operator expressions: expr: round(ln(3.0), 12) @@ -93,5 +95,5 @@ POW(2,3), POWER(2,3), POWER(2,-3), POWER(0.5, -3), POWER(4, 0.5), POWER(-1, 0.5), POWER(-1, 2) FROM dest1 Input: default/dest1 -Output: file:/data/users/emil/hive1/hive1/build/ql/tmp/1531238271/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1327360407/10000 1.098612288668 NULL NULL 1.098612288668 NULL NULL 1.584962500721 NULL NULL 0.47712125472 NULL NULL 1.584962500721 NULL NULL NULL -1.0 7.389056098931 8.0 8.0 0.125 8.0 2.0 NaN 1.0 Index: ql/src/test/results/clientpositive/udf_locate.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_locate.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_locate.q.out (working copy) @@ -29,6 +29,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: locate('abc''abcd') @@ -98,5 +100,5 @@ locate('abc', 'abcd', 'invalid number') FROM src LIMIT 1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/205952342/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/436151757/10000 1 0 2 2 4 4 0 0 2 3 4 2 3 NULL NULL 0 0 Index: ql/src/test/results/clientpositive/input_part2.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input_part2.q.out (working copy) @@ -19,7 +19,10 @@ Map Reduce Alias -> Map Operator Tree: srcpart + TableScan + alias: srcpart Filter Operator + isSamplingPred: false predicate: expr: (((UDFToDouble(key) < UDFToDouble(100)) and (ds = '2008-04-08')) and (hr = '12')) type: boolean @@ -48,7 +51,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/538933043/10004 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/169095192/10004 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -62,10 +65,11 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Filter Operator + isSamplingPred: false predicate: expr: (((UDFToDouble(key) < UDFToDouble(100)) and (ds = '2008-04-09')) and (hr = '12')) type: boolean @@ -94,7 +98,7 @@ File Output Operator compressed: false GlobalTableId: 2 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/538933043/10005 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/169095192/10005 table: input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -108,15 +112,15 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest2 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest2 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest2 Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition partition values: ds 2008-04-08 @@ -135,10 +139,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 Partition partition values: ds 2008-04-09 @@ -157,7 +161,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart @@ -167,11 +171,11 @@ Move Operator files: hdfs directory: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/538933043/10004 - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1990075660/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/169095192/10004 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/32810842/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/538933043/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/169095192/10004 Reduce Output Operator sort order: Map-reduce partition columns: @@ -189,9 +193,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/538933043/10004 + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/169095192/10004 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/538933043/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/169095192/10004 Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -206,7 +210,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Reduce Operator Tree: @@ -214,7 +218,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1990075660/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/32810842/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -227,7 +231,7 @@ bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 @@ -236,7 +240,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1990075660/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/32810842/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -250,10 +254,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1990075660/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/32810842/10001 Stage: Stage-8 Conditional Operator @@ -261,11 +265,11 @@ Move Operator files: hdfs directory: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/538933043/10005 - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1990075660/10002 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/169095192/10005 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/32810842/10002 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/538933043/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/169095192/10005 Reduce Output Operator sort 
order: Map-reduce partition columns: @@ -283,9 +287,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/538933043/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/169095192/10005 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/538933043/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/169095192/10005 Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -300,7 +304,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest2 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest2 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest2 Reduce Operator Tree: @@ -308,7 +312,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1990075660/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/32810842/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -321,7 +325,7 @@ bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest2 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest2 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest2 @@ -330,7 +334,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1990075660/10002 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/32810842/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -344,10 +348,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest2 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest2 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest2 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1990075660/10003 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/32810842/10003 query: FROM srcpart @@ -359,7 +363,7 @@ Output: default/dest2 query: SELECT dest1.* FROM dest1 sort by key,value,ds,hr Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/305007713/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1367548594/10000 0 val_0 12 2008-04-08 0 val_0 12 2008-04-08 0 val_0 12 2008-04-08 @@ -446,7 +450,7 @@ 98 val_98 12 2008-04-08 query: SELECT dest2.* FROM dest2 sort by key,value,ds,hr Input: default/dest2 
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1696118746/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1502320344/10000 0 val_0 12 2008-04-09 0 val_0 12 2008-04-09 0 val_0 12 2008-04-09 Index: ql/src/test/results/clientpositive/join18.q.out =================================================================== --- ql/src/test/results/clientpositive/join18.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join18.q.out (working copy) @@ -25,6 +25,8 @@ Map Reduce Alias -> Map Operator Tree: b:src2 + TableScan + alias: src2 Select Operator expressions: expr: key @@ -142,6 +144,8 @@ Map Reduce Alias -> Map Operator Tree: a:src1 + TableScan + alias: src1 Select Operator expressions: expr: key @@ -210,7 +214,7 @@ ON (a.key = b.key) Input: default/src1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1470641651/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1137643842/10000 NULL NULL 7 0 3 NULL NULL 10 1 NULL NULL Index: ql/src/test/results/clientpositive/union2.q.out =================================================================== --- ql/src/test/results/clientpositive/union2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union2.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unionsrc-subquery1:s1 + TableScan + alias: s1 Select Operator expressions: expr: key @@ -36,6 +38,8 @@ expr: _col0 type: bigint null-subquery2:unionsrc-subquery2:s2 + TableScan + alias: s2 Select Operator expressions: expr: key @@ -82,5 +86,5 @@ query: select count(1) FROM (select s1.key as key, s1.value as value from src s1 UNION ALL select s2.key as key, s2.value as value from src s2) unionsrc Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2117179272/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1378537428/10000 1000 Index: ql/src/test/results/clientpositive/nullgroup4.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup4.q.out (revision 801363) +++ ql/src/test/results/clientpositive/nullgroup4.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(9999)) @@ -66,7 +68,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1323444401/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1232529310/10002 Reduce Output Operator sort order: tag: -1 @@ -100,9 +102,10 @@ Fetch Operator limit: -1 + query: select count(1), count(distinct x.value) from src x where x.key = 9999 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1255708315/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1706398248/10000 0 0 query: explain select count(1), count(distinct x.value) from src x where x.key = 9999 @@ -118,6 +121,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(9999)) @@ -179,7 +184,7 @@ query: select count(1), count(distinct x.value) from src x where x.key = 9999 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1506391866/10000 +Output: 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2036729644/10000 0 0 query: explain select count(1), count(distinct x.value) from src x where x.key = 9999 @@ -196,6 +201,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(9999)) @@ -238,7 +245,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1355680596/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/441797798/10002 Reduce Output Operator sort order: tag: -1 @@ -275,7 +282,7 @@ query: select count(1), count(distinct x.value) from src x where x.key = 9999 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/474355320/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1197695098/10000 0 0 query: explain select count(1), count(distinct x.value) from src x where x.key = 9999 @@ -291,6 +298,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(9999)) @@ -341,5 +350,5 @@ query: select count(1), count(distinct x.value) from src x where x.key = 9999 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1587094101/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1769543362/10000 0 0 Index: ql/src/test/results/clientpositive/ppd_transform.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_transform.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_transform.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: tmap:src + TableScan + alias: src Select Operator expressions: expr: key @@ -77,7 +79,7 @@ ) tmap SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1793530445/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/683513848/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/udf_space.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_space.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_space.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: space(10) @@ -51,7 +53,7 @@ length(space(-100)) FROM src LIMIT 1 Input: default/src -Output: file:/data/users/njain/hive_commit1/hive_commit1/.ptest_0/build/ql/tmp/1380484174/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1933263714/10000 10 0 1 0 0 query: SELECT space(10), @@ -61,5 +63,5 @@ space(-100) FROM src LIMIT 1 Input: default/src -Output: file:/data/users/njain/hive_commit1/hive_commit1/.ptest_0/build/ql/tmp/2091099009/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1506040110/10000 Index: ql/src/test/results/clientpositive/mapreduce5.q.out =================================================================== --- ql/src/test/results/clientpositive/mapreduce5.q.out (revision 801363) +++ ql/src/test/results/clientpositive/mapreduce5.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -92,7 +94,7 @@ Output: 
default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1928799393/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/197645124/10000 490 49 0 val_490 491 49 1 val_491 492 49 2 val_492 Index: ql/src/test/results/clientpositive/subq2.q.out =================================================================== --- ql/src/test/results/clientpositive/subq2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/subq2.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: a:b + TableScan + alias: b Filter Operator predicate: expr: (UDFToDouble(key) >= UDFToDouble(90)) @@ -86,7 +88,7 @@ FROM (SELECT b.key as k, count(1) as c FROM src b GROUP BY b.key) a WHERE a.k >= 90 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1833412775/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1671058599/10000 100 2 103 2 104 2 Index: ql/src/test/results/clientpositive/udf_instr.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_instr.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_instr.q.out (working copy) @@ -25,6 +25,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: instr('abcd''abc') @@ -82,5 +84,5 @@ instr('abcd', null) FROM src LIMIT 1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/548502643/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1911960579/10000 1 0 2 2 0 0 2 3 4 2 3 NULL NULL Index: ql/src/test/results/clientpositive/groupby3_noskew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby3_noskew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby3_noskew.q.out (working copy) @@ -23,6 +23,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: value @@ -109,7 +111,6 @@ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - query: FROM src INSERT OVERWRITE TABLE dest1 SELECT sum(substr(src.value,5)), @@ -125,6 +126,6 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/emil/hive1/hive1/build/ql/tmp/537230860/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2134252669/10000 130091.0 260.182 256.10355987055016 98.0 0.0 142.92680950752379 143.06995106518903 20428.072875999995 20469.010897795586 query: DROP TABLE dest1 Index: ql/src/test/results/clientpositive/ppd1.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd1.q.out (working copy) @@ -12,6 +12,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (key > '2') @@ -39,7 +41,7 @@ query: SELECT src.key as c3 from src where src.key > '2' Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1708124764/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1622609719/10000 238 86 311 Index: ql/src/test/results/clientpositive/input24.q.out =================================================================== --- 
ql/src/test/results/clientpositive/input24.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input24.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (d = '2009-01-01') @@ -60,6 +62,6 @@ query: select count(1) from tst x where x.d='2009-01-01' Input: default/tst/d=2009-01-01 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/914060511/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1878605126/10000 0 query: drop table tst Index: ql/src/test/results/clientpositive/groupby8_map_skew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby8_map_skew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby8_map_skew.q.out (working copy) @@ -19,6 +19,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Reduce Output Operator key expressions: expr: substr(value, 5) @@ -65,7 +67,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/405462104/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1626240365/10004 Reduce Output Operator key expressions: expr: _col0 @@ -123,7 +125,7 @@ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/405462104/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1626240365/10005 Reduce Output Operator key expressions: expr: _col0 @@ -187,7 +189,7 @@ Output: default/dest2 query: SELECT DEST1.* FROM DEST1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1228344007/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/195494600/10000 0 1 10 1 100 1 @@ -499,7 +501,7 @@ 98 1 query: SELECT DEST2.* FROM DEST2 Input: default/dest2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1951309908/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1605077446/10000 0 1 10 1 100 1 Index: ql/src/test/results/clientpositive/implicit_cast1.q.out =================================================================== --- ql/src/test/results/clientpositive/implicit_cast1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/implicit_cast1.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: implicit_test1 + TableScan + alias: implicit_test1 Filter Operator predicate: expr: (UDFToDouble(a) <> UDFToDouble(0)) @@ -46,5 +48,5 @@ FROM implicit_test1 WHERE implicit_test1.a <> 0 Input: default/implicit_test1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2080135303/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/915534198/10000 query: DROP TABLE implicit_test1 Index: ql/src/test/results/clientpositive/sample6.q.out =================================================================== --- ql/src/test/results/clientpositive/sample6.q.out (revision 801363) +++ ql/src/test/results/clientpositive/sample6.q.out (working copy) @@ -16,11 +16,15 @@ Map Reduce Alias -> Map Operator Tree: s + TableScan + alias: s Filter Operator + isSamplingPred: false predicate: expr: (((hash(key) & 2147483647) % 4) = 0) type: boolean Filter Operator + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 4) = 0) type: boolean @@ -34,7 +38,7 @@ File 
Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/24110695/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/237880152/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -48,14 +52,14 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -71,7 +75,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket @@ -81,11 +85,11 @@ Move Operator files: hdfs directory: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/24110695/10002 - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/315709581/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/237880152/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/871659719/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/24110695/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/237880152/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -99,9 +103,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/24110695/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/237880152/10002 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/24110695/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/237880152/10002 Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -116,7 +120,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest1 + location 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Reduce Operator Tree: @@ -124,7 +128,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/315709581/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/871659719/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -137,7 +141,7 @@ bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 @@ -146,7 +150,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/315709581/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/871659719/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -160,10 +164,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/315709581/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/871659719/10001 query: INSERT OVERWRITE TABLE dest1 SELECT s.* @@ -172,7 +176,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1280513790/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/899553837/10000 468 val_469 272 val_273 448 val_449 Index: ql/src/test/results/clientpositive/join_hive_626.q.out =================================================================== --- ql/src/test/results/clientpositive/join_hive_626.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join_hive_626.q.out (working copy) @@ -28,6 +28,8 @@ Map Reduce Alias -> Map Operator Tree: hive_foo + TableScan + alias: hive_foo Reduce Output Operator key expressions: expr: foo_id @@ -41,6 +43,8 @@ expr: foo_name type: string hive_bar + TableScan + alias: hive_bar Reduce Output Operator key expressions: expr: foo_id @@ -89,6 +93,8 @@ expr: _col10 type: string hive_count + TableScan + alias: hive_count Reduce Output Operator key expressions: expr: bar_id @@ -135,7 +141,7 @@ Input: default/hive_foo Input: default/hive_count Input: default/hive_bar -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/837595619/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/168017101/10000 foo1 bar10 2 query: drop table hive_foo 
query: drop table hive_bar Index: ql/src/test/results/clientpositive/groupby4_map.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby4_map.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby4_map.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator Group By Operator aggregations: @@ -66,5 +68,5 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/225285836/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/660625663/10000 500 Index: ql/src/test/results/clientpositive/join27.q.out =================================================================== --- ql/src/test/results/clientpositive/join27.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join27.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: y + TableScan + alias: y Common Join Operator condition map: Inner Join 0 to 1 @@ -71,6 +73,8 @@ limit: -1 Alias -> Map Local Operator Tree: x + TableScan + alias: x Common Join Operator condition map: Inner Join 0 to 1 @@ -124,10 +128,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/478661829/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/246455623/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/703862692/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1006346287/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -171,7 +175,7 @@ Output: default/dest_j1 query: select * from dest_j1 x order by x.key, x.value Input: default/dest_j1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1580756580/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2119520959/10000 NULL val_165 val_165 NULL val_165 val_165 NULL val_193 val_193 Index: ql/src/test/results/clientpositive/udf_coalesce.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_coalesce.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_coalesce.q.out (working copy) @@ -30,6 +30,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: COALESCE(1) @@ -102,7 +104,7 @@ COALESCE(IF(TRUE, NULL, 0), NULL) FROM src LIMIT 1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/933961022/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/928019024/10000 1 1 2 1 3 4 1 1 2 1 3 4 1.0 1.0 2.0 2.0 2.0 NULL query: EXPLAIN SELECT COALESCE(src_thrift.lint[1], 999), @@ -121,6 +123,8 @@ Map Reduce Alias -> Map Operator Tree: src_thrift + TableScan + alias: src_thrift Select Operator expressions: expr: COALESCE(lint[1],999) @@ -147,7 +151,7 @@ COALESCE(src_thrift.mstringstring['key_2'], '999') FROM src_thrift Input: default/src_thrift -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/818393272/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/826777318/10000 0 0 999 2 1 999 4 8 value_2 Index: ql/src/test/results/clientpositive/udf_length.q.out 
=================================================================== --- ql/src/test/results/clientpositive/udf_length.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_length.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src1 + TableScan + alias: src1 Select Operator expressions: expr: length(value) @@ -33,10 +35,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/768163578/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2113573167/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/739685624/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1544905742/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -73,7 +75,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/99763731/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/966501876/10000 7 0 7 @@ -116,6 +118,8 @@ Map Reduce Alias -> Map Operator Tree: dest1 + TableScan + alias: dest1 Select Operator expressions: expr: length(name) @@ -135,6 +139,6 @@ query: SELECT length(dest1.name) FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1621567152/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1105559783/10000 2 query: DROP TABLE dest1 Index: ql/src/test/results/clientpositive/router_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/router_join_ppr.q.out (revision 0) +++ ql/src/test/results/clientpositive/router_join_ppr.q.out (revision 0) @@ -0,0 +1,827 @@ +query: EXPLAIN EXTENDED + FROM + src a + RIGHT OUTER JOIN + srcpart b + ON (a.key = b.key AND b.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_TABREF src a) (TOK_TABREF srcpart b) (AND (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (= (. (TOK_TABLE_OR_COL b) ds) '2008-04-08')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. 
(TOK_TABLE_OR_COL b) key) 25))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + b + TableScan + alias: b + Filter Operator + isSamplingPred: false + predicate: + expr: (((ds = '2008-04-08') and (UDFToDouble(key) > UDFToDouble(15))) and (UDFToDouble(key) < UDFToDouble(25))) + type: boolean + Filter Operator + isSamplingPred: false + predicate: + expr: (ds = '2008-04-08') + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + a + TableScan + alias: a + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + Needs Tagging: true + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name src + columns.types string:string + serialization.ddl struct src { string key, string value} + serialization.format 1 + columns key,value + bucket_count -1 + serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: src + Reduce Operator Tree: + Join Operator + condition map: + Right Outer Join0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} + outputColumnNames: _col0, _col1, _col2, _col3 + Filter Operator + isSamplingPred: false + predicate: + expr: ((((UDFToDouble(_col0) > UDFToDouble(10)) and (UDFToDouble(_col0) < UDFToDouble(20))) and (UDFToDouble(_col2) > UDFToDouble(15))) and (UDFToDouble(_col2) < UDFToDouble(25))) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1982665597/10001 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + serialization.format 1 + columns.types string:string:string:string + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +query: FROM + src a + RIGHT OUTER JOIN + srcpart b + ON (a.key = b.key AND b.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/srcpart/ds=2008-04-08/hr=12 +Input: default/src +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/230534559/10000 +17 val_17 17 val_17 +17 val_17 17 val_17 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +19 val_19 19 val_19 +19 val_19 19 val_19 +query: EXPLAIN EXTENDED + FROM + srcpart a + RIGHT OUTER JOIN + src b + ON (a.key = b.key AND a.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_TABREF srcpart a) (TOK_TABREF src b) (AND (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (= (. (TOK_TABLE_OR_COL a) ds) '2008-04-08')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. 
(TOK_TABLE_OR_COL b) key) 25))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + b + TableScan + alias: b + Filter Operator + isSamplingPred: false + predicate: + expr: ((UDFToDouble(key) > UDFToDouble(15)) and (UDFToDouble(key) < UDFToDouble(25))) + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + a + TableScan + alias: a + Filter Operator + isSamplingPred: false + predicate: + expr: (ds = '2008-04-08') + type: boolean + Filter Operator + isSamplingPred: false + predicate: + expr: (ds = '2008-04-08') + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + Needs Tagging: true + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name src + columns.types string:string + serialization.ddl struct src { string key, string value} + serialization.format 1 + columns key,value + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value 
+ partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + Reduce Operator Tree: + Join Operator + condition map: + Right Outer Join0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} + outputColumnNames: _col0, _col1, _col4, _col5 + Filter Operator + isSamplingPred: false + predicate: + expr: ((((UDFToDouble(_col0) > UDFToDouble(10)) and (UDFToDouble(_col0) < UDFToDouble(20))) and (UDFToDouble(_col4) > UDFToDouble(15))) and (UDFToDouble(_col4) < UDFToDouble(25))) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2065246684/10001 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + serialization.format 1 + columns.types string:string:string:string + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +query: FROM + srcpart a + RIGHT OUTER JOIN + src b + ON (a.key = b.key AND a.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 +Input: default/src +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/srcpart/ds=2008-04-08/hr=12 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1919542948/10000 +17 val_17 17 val_17 +17 val_17 17 val_17 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +19 val_19 19 val_19 +19 val_19 19 val_19 +query: EXPLAIN EXTENDED + FROM + src a + RIGHT OUTER JOIN + srcpart b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08' +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_TABREF src a) (TOK_TABREF srcpart b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. (TOK_TABLE_OR_COL b) key) 25)) (= (. 
(TOK_TABLE_OR_COL b) ds) '2008-04-08'))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + b + TableScan + alias: b + Filter Operator + isSamplingPred: false + predicate: + expr: (((UDFToDouble(key) > UDFToDouble(15)) and (UDFToDouble(key) < UDFToDouble(25))) and (ds = '2008-04-08')) + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + a + TableScan + alias: a + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + Needs Tagging: true + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name src + columns.types string:string + serialization.ddl struct src { string key, string value} + serialization.format 1 + columns key,value + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + 
file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: src + Reduce Operator Tree: + Join Operator + condition map: + Right Outer Join0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Filter Operator + isSamplingPred: false + predicate: + expr: (((((UDFToDouble(_col0) > UDFToDouble(10)) and (UDFToDouble(_col0) < UDFToDouble(20))) and (UDFToDouble(_col2) > UDFToDouble(15))) and (UDFToDouble(_col2) < UDFToDouble(25))) and (_col4 = '2008-04-08')) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/463243594/10001 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + serialization.format 1 + columns.types string:string:string:string + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +query: FROM + src a + RIGHT OUTER JOIN + srcpart b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08' +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/srcpart/ds=2008-04-08/hr=12 +Input: default/src +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1737067782/10000 +17 val_17 17 val_17 +17 val_17 17 val_17 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +19 val_19 19 val_19 +19 val_19 19 val_19 +query: EXPLAIN EXTENDED + FROM + srcpart a + RIGHT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08' +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_TABREF srcpart a) (TOK_TABREF src b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. (TOK_TABLE_OR_COL b) key) 25)) (= (. 
(TOK_TABLE_OR_COL a) ds) '2008-04-08'))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + b + TableScan + alias: b + Filter Operator + isSamplingPred: false + predicate: + expr: ((UDFToDouble(key) > UDFToDouble(15)) and (UDFToDouble(key) < UDFToDouble(25))) + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + a + TableScan + alias: a + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + Needs Tagging: true + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name src + columns.types string:string + serialization.ddl struct src { string key, string value} + serialization.format 1 + columns key,value + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string 
value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 + Partition + partition values: + ds 2008-04-09 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + Partition + partition values: + ds 2008-04-09 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + Reduce Operator Tree: + Join Operator + condition map: + Right Outer Join0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} + 1 {VALUE._col0} {VALUE._col1} + outputColumnNames: _col0, _col1, _col2, _col4, _col5 + Filter Operator + isSamplingPred: false + predicate: + expr: (((((UDFToDouble(_col0) > UDFToDouble(10)) and (UDFToDouble(_col0) < UDFToDouble(20))) and (UDFToDouble(_col4) > UDFToDouble(15))) and (UDFToDouble(_col4) < UDFToDouble(25))) and (_col2 = '2008-04-08')) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/549146621/10001 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + serialization.format 1 + columns.types string:string:string:string + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +query: FROM + srcpart a + RIGHT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value 
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08' +Input: default/src +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/srcpart/ds=2008-04-08/hr=12 +Input: default/srcpart/ds=2008-04-09/hr=11 +Input: default/srcpart/ds=2008-04-09/hr=12 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1462270899/10000 +17 val_17 17 val_17 +17 val_17 17 val_17 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +19 val_19 19 val_19 +19 val_19 19 val_19 Index: ql/src/test/results/clientpositive/ppd_random.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_random.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_random.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: src2:src + TableScan + alias: src Filter Operator predicate: expr: (key > '2') @@ -46,6 +48,8 @@ expr: _col1 type: string src1:src + TableScan + alias: src Select Operator expressions: expr: key Index: ql/src/test/results/clientpositive/join7.q.out =================================================================== --- ql/src/test/results/clientpositive/join7.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join7.q.out (working copy) @@ -30,6 +30,8 @@ Map Reduce Alias -> Map Operator Tree: c:a:src1 + TableScan + alias: src1 Filter Operator predicate: expr: ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20))) @@ -60,6 +62,8 @@ expr: _col1 type: string c:b:src2 + TableScan + alias: src2 Filter Operator predicate: expr: ((UDFToDouble(key) > UDFToDouble(15)) and (UDFToDouble(key) < UDFToDouble(25))) @@ -90,6 +94,8 @@ expr: _col1 type: string c:c:src3 + TableScan + alias: src3 Filter Operator predicate: expr: ((UDFToDouble(key) > UDFToDouble(20)) and (UDFToDouble(key) < UDFToDouble(25))) @@ -216,7 +222,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1493295879/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1132812808/10000 11 val_11 NULL NULL NULL NULL 12 val_12 NULL NULL NULL NULL 12 val_12 NULL NULL NULL NULL Index: ql/src/test/results/clientpositive/union12.q.out =================================================================== --- ql/src/test/results/clientpositive/union12.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union12.q.out (working copy) @@ -31,6 +31,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery1:unionsrc-subquery1-subquery1:s1 + TableScan + alias: s1 Select Operator Group By Operator aggregations: @@ -66,7 +68,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/183213768/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/317852044/10002 Union Select Operator expressions: @@ -90,7 +92,7 @@ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/183213768/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/317852044/10004 Union Select Operator expressions: @@ -114,7 +116,7 @@ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/183213768/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/317852044/10005 Union Select Operator expressions: @@ -145,10 +147,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/987914812/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/847490196/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/183213768/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/317852044/10003 Reduce Output Operator sort order: Map-reduce partition columns: @@ -185,6 +187,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery2:unionsrc-subquery1-subquery2:s2 + TableScan + alias: s2 Select Operator Group By Operator aggregations: @@ -221,6 +225,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:unionsrc-subquery2:s3 + TableScan + alias: s3 Select Operator Group By Operator aggregations: @@ -266,7 +272,7 @@ Output: default/tmptable query: select * from tmptable x sort by x.key Input: default/tmptable -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/93419450/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/983094247/10000 tst1 500 tst2 25 tst3 1000 Index: ql/src/test/results/clientpositive/groupby1_map_nomap.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby1_map_nomap.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby1_map_nomap.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -88,7 +90,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1003792766/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/200206031/10000 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/create_genericudaf.q.out =================================================================== --- ql/src/test/results/clientpositive/create_genericudaf.q.out (revision 801363) +++ ql/src/test/results/clientpositive/create_genericudaf.q.out (working copy) @@ -28,6 +28,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: value @@ -78,6 +80,6 @@ test_avg(substr(value,5)) FROM src Input: default/src -Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1744650408/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1700649603/10000 1.0 260.182 query: DROP TEMPORARY FUNCTIOn test_avg Index: ql/src/test/results/clientpositive/udf2.q.out =================================================================== --- ql/src/test/results/clientpositive/udf2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf2.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: dest1 + TableScan + alias: dest1 Select Operator expressions: expr: '|' @@ -47,5 +49,5 @@ query: SELECT '|', trim(dest1.c1), '|', rtrim(dest1.c1), '|', ltrim(dest1.c1), '|' FROM dest1 Input: default/dest1 -Output: 
file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1008388260/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1661443030/10000 | abc | abc | abc | Index: ql/src/test/results/clientpositive/join13.q.out =================================================================== --- ql/src/test/results/clientpositive/join13.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join13.q.out (working copy) @@ -21,6 +21,8 @@ Map Reduce Alias -> Map Operator Tree: src2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -43,6 +45,8 @@ expr: _col1 type: string src1:src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(100)) @@ -102,6 +106,8 @@ expr: _col0 type: string src3:src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(200)) @@ -161,7 +167,7 @@ (SELECT src.key as c5, src.value as c6 from src) src3 ON src1.c1 + src2.c3 = src3.c5 AND src3.c5 < 200 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/259098425/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1605427587/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/groupby7_map_skew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby7_map_skew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby7_map_skew.q.out (working copy) @@ -20,6 +20,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -87,7 +89,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/319307034/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1750763372/10004 Reduce Output Operator key expressions: expr: _col0 @@ -145,7 +147,7 @@ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/319307034/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1750763372/10005 Reduce Output Operator key expressions: expr: _col0 @@ -177,7 +179,7 @@ Stage: Stage-5 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/319307034/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1750763372/10006 Reduce Output Operator key expressions: expr: _col0 @@ -241,7 +243,7 @@ Output: default/dest2 query: SELECT DEST1.* FROM DEST1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/362951911/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/682338522/10000 0 0.0 10 10.0 100 200.0 @@ -553,7 +555,7 @@ 98 196.0 query: SELECT DEST2.* FROM DEST2 Input: default/dest2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1043792398/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/495752431/10000 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/groupby8_noskew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby8_noskew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby8_noskew.q.out (working copy) @@ -19,6 +19,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Reduce Output Operator key expressions: 
expr: substr(value, 5) @@ -65,7 +67,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/121204644/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1854610071/10004 Reduce Output Operator key expressions: expr: _col0 @@ -123,7 +125,7 @@ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/121204644/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1854610071/10005 Reduce Output Operator key expressions: expr: _col0 @@ -187,7 +189,7 @@ Output: default/dest2 query: SELECT DEST1.* FROM DEST1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/2074527502/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/208463828/10000 0 1 10 1 100 1 @@ -499,7 +501,7 @@ 98 1 query: SELECT DEST2.* FROM DEST2 Input: default/dest2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/572299908/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/621035645/10000 0 1 10 1 100 1 Index: ql/src/test/results/clientpositive/join36.q.out =================================================================== --- ql/src/test/results/clientpositive/join36.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join36.q.out (working copy) @@ -29,6 +29,8 @@ Map Reduce Alias -> Map Operator Tree: y + TableScan + alias: y Common Join Operator condition map: Inner Join 0 to 1 @@ -74,6 +76,8 @@ limit: -1 Alias -> Map Local Operator Tree: x + TableScan + alias: x Common Join Operator condition map: Inner Join 0 to 1 @@ -118,10 +122,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/33280677/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/576445655/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1096000115/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1356744320/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -165,7 +169,7 @@ Output: default/dest_j1 query: select * from dest_j1 x order by x.key Input: default/dest_j1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1259857604/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/631598409/10000 0 3 3 2 1 1 4 1 1 Index: ql/src/test/results/clientpositive/input18.q.out =================================================================== --- ql/src/test/results/clientpositive/input18.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input18.q.out (working copy) @@ -19,6 +19,8 @@ Map Reduce Alias -> Map Operator Tree: tmap:src + TableScan + alias: src Select Operator expressions: expr: key @@ -100,7 +102,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1602054663/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1817619374/10000 0 val_0+3+7 0 val_0+3+7 0 val_0+3+7 Index: ql/src/test/results/clientpositive/groupby2.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby2.q.out (working copy) 
@@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -55,7 +57,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/98786317/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1035591703/10002 Reduce Output Operator key expressions: expr: _col0 @@ -124,7 +126,7 @@ Output: default/dest_g2 query: SELECT dest_g2.* FROM dest_g2 Input: default/dest_g2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/376809666/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/33506140/10000 0 1 00.0 1 71 116414.0 2 69 225571.0 Index: ql/src/test/results/clientpositive/input_part5.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part5.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input_part5.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: ((ds = '2008-04-08') and (UDFToDouble(key) < UDFToDouble(100))) @@ -50,10 +52,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1070846773/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1321538842/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1822724007/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/378526421/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -98,7 +100,7 @@ Output: default/tmptable query: select * from tmptable x sort by x.key,x.value,x.ds,x.hr Input: default/tmptable -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/994747882/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1919037456/10000 0 val_0 2008-04-08 11 0 val_0 2008-04-08 11 0 val_0 2008-04-08 11 Index: ql/src/test/results/clientpositive/union5.q.out =================================================================== --- ql/src/test/results/clientpositive/union5.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union5.q.out (working copy) @@ -19,6 +19,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unionsrc-subquery1:s1 + TableScan + alias: s1 Select Operator Group By Operator aggregations: @@ -54,7 +56,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1466934742/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/418741638/10002 Union Select Operator expressions: @@ -81,7 +83,7 @@ value expressions: expr: _col1 type: bigint - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1466934742/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/418741638/10003 Union Select Operator expressions: @@ -135,6 +137,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:unionsrc-subquery2:s2 + TableScan + alias: s2 Select Operator Group By Operator aggregations: @@ -176,6 +180,6 @@ UNION ALL select 'tst2' as key, count(1) as value from src s2) unionsrc group by unionsrc.key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1264889595/10000 +Output: 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1640354803/10000 tst1 1 tst2 1 Index: ql/src/test/results/clientpositive/input2_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input2_limit.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input2_limit.q.out (working copy) @@ -12,6 +12,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(300)) @@ -42,7 +44,7 @@ query: SELECT x.* FROM SRC x WHERE x.key < 300 LIMIT 5 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/689430698/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/515715119/10000 238 val_238 86 val_86 27 val_27 Index: ql/src/test/results/clientpositive/union21.q.out =================================================================== --- ql/src/test/results/clientpositive/union21.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union21.q.out (working copy) @@ -26,6 +26,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery2:union_output-subquery1-subquery2:src_thrift + TableScan + alias: src_thrift Select Operator expressions: expr: astring @@ -58,6 +60,8 @@ expr: _col1 type: bigint null-subquery1-subquery1-subquery2:union_output-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -90,6 +94,8 @@ expr: _col1 type: bigint null-subquery1-subquery1-subquery1-subquery1:union_output-subquery1-subquery1-subquery1-subquery1:src + TableScan + alias: src Select Operator expressions: expr: '1' @@ -122,6 +128,8 @@ expr: _col1 type: bigint null-subquery2:union_output-subquery2:src_thrift + TableScan + alias: src_thrift Select Operator expressions: expr: lstring[0] @@ -154,6 +162,8 @@ expr: _col1 type: bigint null-subquery1-subquery1-subquery1-subquery2:union_output-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: reverse(key) @@ -228,7 +238,7 @@ GROUP BY key Input: default/src_thrift Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/239888968/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1135321195/10000 NULL 2 0 7 001 2 Index: ql/src/test/results/clientpositive/mapreduce8.q.out =================================================================== --- ql/src/test/results/clientpositive/mapreduce8.q.out (revision 801363) +++ ql/src/test/results/clientpositive/mapreduce8.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -109,7 +111,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1533727710/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/16852304/10000 0 val_0 0 0 0 val_0 0 val_0 0 0 0 val_0 0 val_0 0 0 0 val_0 Index: ql/src/test/results/clientpositive/sample1.q.out =================================================================== --- ql/src/test/results/clientpositive/sample1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/sample1.q.out (working copy) @@ -17,11 +17,15 @@ Map Reduce Alias -> Map Operator Tree: s + TableScan + alias: s Filter Operator + isSamplingPred: true predicate: expr: (((hash(rand()) & 2147483647) % 1) = 0) type: 
boolean Filter Operator + isSamplingPred: false predicate: expr: ((ds = '2008-04-08') and (hr = '11')) type: boolean @@ -50,7 +54,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1583422790/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -64,14 +68,14 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition partition values: ds 2008-04-08 @@ -90,7 +94,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart @@ -100,11 +104,11 @@ Move Operator files: hdfs directory: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1583422790/10002 - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1605336666/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1104113199/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1583422790/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -122,9 +126,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1583422790/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1583422790/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002 Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -139,7 +143,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Reduce Operator Tree: @@ -147,7 +151,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1605336666/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1104113199/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -160,7 +164,7 @@ bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 @@ -169,7 +173,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1605336666/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1104113199/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -183,10 +187,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1605336666/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1104113199/10001 query: INSERT OVERWRITE TABLE dest1 SELECT s.* @@ -196,7 +200,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1872171201/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/128296418/10000 238 val_238 2008-04-08 11 86 val_86 2008-04-08 11 311 val_311 2008-04-08 11 @@ -699,5 +703,5 @@ 97 val_97 2008-04-08 11 query: select count(1) from srcbucket Input: default/srcbucket -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/594352691/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/822380674/10000 1000 Index: ql/src/test/results/clientpositive/join22.q.out =================================================================== --- ql/src/test/results/clientpositive/join22.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join22.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src5:src3:src2 + TableScan + alias: src2 Reduce Output Operator key expressions: expr: key @@ -23,6 +25,8 @@ type: string tag: 1 src5:src3:src1 + TableScan + alias: src1 Reduce Output Operator key expressions: expr: key @@ -76,6 +80,8 @@ expr: _col3 type: string 
src5:src4 + TableScan + alias: src4 Reduce Output Operator key expressions: expr: key Index: ql/src/test/results/clientpositive/groupby6_map_skew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby6_map_skew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby6_map_skew.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: value @@ -52,7 +54,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/338642133/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1400626799/10002 Reduce Output Operator key expressions: expr: _col0 @@ -100,7 +102,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1181271396/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1699461209/10000 0 1 2 Index: ql/src/test/results/clientpositive/ppd_outer_join3.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_outer_join3.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_outer_join3.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: b + TableScan + alias: b Reduce Output Operator key expressions: expr: key @@ -33,6 +35,8 @@ expr: value type: string a + TableScan + alias: a Reduce Output Operator key expressions: expr: key @@ -90,7 +94,7 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/339488441/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1232779923/10000 150 val_150 150 val_150 152 val_152 152 val_152 152 val_152 152 val_152 Index: ql/src/test/results/clientpositive/groupby1_map.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby1_map.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby1_map.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -88,7 +90,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/650056033/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1144777415/10000 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/groupby1_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby1_limit.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby1_limit.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -68,7 +70,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1076473475/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1173614638/10002 Reduce Output Operator sort order: tag: -1 @@ -112,7 +114,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: 
file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1546240918/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/197588214/10000 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/sample9.q.out =================================================================== --- ql/src/test/results/clientpositive/sample9.q.out (revision 801363) +++ ql/src/test/results/clientpositive/sample9.q.out (working copy) @@ -13,11 +13,15 @@ Map Reduce Alias -> Map Operator Tree: s:a + TableScan + alias: a Filter Operator + isSamplingPred: false predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean Filter Operator + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean @@ -38,7 +42,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/1998091453/10001 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/600393334/10001 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -48,9 +52,9 @@ columns.types int:string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket Path -> Partition: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -66,7 +70,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket @@ -78,7 +82,7 @@ query: SELECT s.* FROM (SELECT a.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) a) s Input: default/srcbucket -Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/974126813/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1923094099/10000 474 val_475 62 val_63 468 val_469 Index: ql/src/test/results/clientpositive/cast1.q.out =================================================================== --- ql/src/test/results/clientpositive/cast1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/cast1.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(86)) @@ -54,10 +56,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/811676056/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/519069919/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/2035347058/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/164262834/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -106,5 +108,5 @@ Output: 
default/dest1 query: select dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/855406378/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1857264488/10000 5 5.0 5.0 5.0 5 true 1 Index: ql/src/test/results/clientpositive/quote1.q.out =================================================================== --- ql/src/test/results/clientpositive/quote1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/quote1.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: ((UDFToDouble(key) >= UDFToDouble(200)) and (UDFToDouble(key) < UDFToDouble(300))) @@ -52,10 +54,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/801828487/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/963537398/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1251238086/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/319542195/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -105,6 +107,8 @@ Map Reduce Alias -> Map Operator Tree: int + TableScan + alias: int Filter Operator predicate: expr: (table = '2008-04-08') @@ -140,7 +144,7 @@ Output: default/dest1/table=2008-04-08 query: SELECT `int`.`location`, `int`.`type`, `int`.`table` FROM dest1 `int` WHERE `int`.`table` = '2008-04-08' Input: default/dest1/table=2008-04-08 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1856384847/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1560089475/10000 238 val_238 2008-04-08 255 val_255 2008-04-08 278 val_278 2008-04-08 Index: ql/src/test/results/clientpositive/notable_alias2.q.out =================================================================== --- ql/src/test/results/clientpositive/notable_alias2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/notable_alias2.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(100)) @@ -100,7 +102,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/339578948/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1898241418/10000 1234 0 3.0 1234 10 1.0 1234 11 1.0 Index: ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out =================================================================== --- ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out (revision 801363) +++ ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src_thrift + TableScan + alias: src_thrift Select Operator expressions: expr: lint @@ -73,7 +75,7 @@ Output: default/columnarserde_create_shortcut query: SELECT columnarserde_create_shortcut.* FROM columnarserde_create_shortcut DISTRIBUTE BY 1 Input: default/columnarserde_create_shortcut -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2113912969/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1249909626/10000 [0,0,0] ["0","0","0"] {"key_0":"value_0"} 1712634731 
record_0 [1,2,3] ["10","100","1000"] {"key_1":"value_1"} 465985200 record_1 [2,4,6] ["20","200","2000"] {"key_2":"value_2"} -751827638 record_2 @@ -87,7 +89,7 @@ null null {} 0 NULL query: SELECT columnarserde_create_shortcut.a[0], columnarserde_create_shortcut.b[0], columnarserde_create_shortcut.c['key2'], columnarserde_create_shortcut.d, columnarserde_create_shortcut.e FROM columnarserde_create_shortcut DISTRIBUTE BY 1 Input: default/columnarserde_create_shortcut -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1302632919/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/467882709/10000 0 0 NULL 1712634731 record_0 1 10 NULL 465985200 record_1 2 20 NULL -751827638 record_2 @@ -111,7 +113,7 @@ value string from deserializer query: SELECT columnShortcutTable.* FROM columnShortcutTable Input: default/columnshortcuttable -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/756410828/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2017736818/10000 238 val_238 86 val_86 311 val_311 @@ -125,7 +127,7 @@ query: ALTER TABLE columnShortcutTable ADD COLUMNS (c string) query: SELECT columnShortcutTable.* FROM columnShortcutTable Input: default/columnshortcuttable -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/656879171/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/984890513/10000 238 val_238 NULL 86 val_86 NULL 311 val_311 NULL @@ -139,7 +141,7 @@ query: ALTER TABLE columnShortcutTable REPLACE COLUMNS (key int) query: SELECT columnShortcutTable.* FROM columnShortcutTable Input: default/columnshortcuttable -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/90567567/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1805361378/10000 238 86 311 Index: ql/src/test/results/clientpositive/join2.q.out =================================================================== --- ql/src/test/results/clientpositive/join2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join2.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src2 + TableScan + alias: src2 Reduce Output Operator key expressions: expr: key @@ -28,6 +30,8 @@ expr: key type: string src1 + TableScan + alias: src1 Reduce Output Operator key expressions: expr: key @@ -72,6 +76,8 @@ expr: _col0 type: string src3 + TableScan + alias: src3 Reduce Output Operator key expressions: expr: UDFToDouble(key) @@ -132,7 +138,7 @@ Output: default/dest_j2 query: SELECT dest_j2.* FROM dest_j2 Input: default/dest_j2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/1406983217/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/558120835/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/udf_lpad_rpad.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_lpad_rpad.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_lpad_rpad.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: lpad('hi', 1, '?') @@ -43,7 +45,7 @@ lpad('hi', 6, '123') FROM src LIMIT 1 Input: default/src -Output: file:/data/users/njain/hive_commit1/hive_commit1/.ptest_2/build/ql/tmp/1193063785/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/440914656/10000 h ...hi 
1231hi query: EXPLAIN SELECT rpad('hi', 1, '?'), @@ -62,6 +64,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: rpad('hi', 1, '?') @@ -90,5 +94,5 @@ rpad('hi', 6, '123') FROM src LIMIT 1 Input: default/src -Output: file:/data/users/njain/hive_commit1/hive_commit1/.ptest_2/build/ql/tmp/1051346108/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1578755009/10000 h hi... hi1231 Index: ql/src/test/results/clientpositive/input7.q.out =================================================================== --- ql/src/test/results/clientpositive/input7.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input7.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src1 + TableScan + alias: src1 Select Operator expressions: expr: null @@ -44,10 +46,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1246196954/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1105502435/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/2070937236/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1662408362/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -87,7 +89,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1350143879/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/136163075/10000 NULL 238 NULL NULL NULL 311 Index: ql/src/test/results/clientpositive/ppd_constant_expr.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_constant_expr.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_constant_expr.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src1 + TableScan + alias: src1 Select Operator expressions: expr: (UDFToDouble(4) + null) @@ -48,10 +50,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1808398705/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/194859935/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1179440766/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1903200653/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -93,7 +95,7 @@ Output: default/ppd_constant_expr query: SELECT ppd_constant_expr.* FROM ppd_constant_expr Input: default/ppd_constant_expr -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/2078786594/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/869842043/10000 NULL NULL NULL NULL NULL NULL NULL NULL NULL Index: ql/src/test/results/clientpositive/rand_partitionpruner3.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (revision 801363) +++ ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (working copy) @@ -13,7 +13,10 @@ Map Reduce Alias -> Map Operator Tree: a + TableScan + alias: a Filter Operator + isSamplingPred: false predicate: expr: ((((rand(UDFToLong(1)) < 0.1) and (ds = '2008-04-08')) and not 
((UDFToDouble(key) > UDFToDouble(50)) or (UDFToDouble(key) < UDFToDouble(10)))) and (hr like '%2')) type: boolean @@ -31,7 +34,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1921119763/10001 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/330469412/10001 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -41,9 +44,9 @@ columns.types string:string:string:string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Path -> Partition: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition partition values: ds 2008-04-08 @@ -62,7 +65,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart @@ -73,7 +76,7 @@ query: select a.* from srcpart a where rand(1) < 0.1 and a.ds = '2008-04-08' and not(key > 50 or key < 10) and a.hr like '%2' Input: default/srcpart/ds=2008-04-08/hr=12 -Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/353265726/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1434721309/10000 42 val_42 2008-04-08 12 44 val_44 2008-04-08 12 26 val_26 2008-04-08 12 @@ -93,11 +96,15 @@ Map Reduce Alias -> Map Operator Tree: a + TableScan + alias: a Filter Operator + isSamplingPred: false predicate: expr: (((ds = '2008-04-08') and not ((UDFToDouble(key) > UDFToDouble(50)) or (UDFToDouble(key) < UDFToDouble(10)))) and (hr like '%2')) type: boolean Filter Operator + isSamplingPred: false predicate: expr: (((ds = '2008-04-08') and not ((UDFToDouble(key) > UDFToDouble(50)) or (UDFToDouble(key) < UDFToDouble(10)))) and (hr like '%2')) type: boolean @@ -115,7 +122,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/140562086/10001 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1155307823/10001 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -125,9 +132,9 @@ columns.types string:string:string:string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Path -> Partition: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition 
partition values: ds 2008-04-08 @@ -146,7 +153,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart @@ -157,7 +164,7 @@ query: select a.* from srcpart a where a.ds = '2008-04-08' and not(key > 50 or key < 10) and a.hr like '%2' Input: default/srcpart/ds=2008-04-08/hr=12 -Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/671022462/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2074930969/10000 27 val_27 2008-04-08 12 37 val_37 2008-04-08 12 15 val_15 2008-04-08 12 Index: ql/src/test/results/clientpositive/union15.q.out =================================================================== --- ql/src/test/results/clientpositive/union15.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union15.q.out (working copy) @@ -23,6 +23,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery1:unionsrc-subquery1-subquery1:s1 + TableScan + alias: s1 Select Operator Group By Operator aggregations: @@ -58,7 +60,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1035950223/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2063466051/10002 Union Select Operator expressions: @@ -85,7 +87,7 @@ value expressions: expr: _col1 type: bigint - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1035950223/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2063466051/10003 Union Select Operator expressions: @@ -112,7 +114,7 @@ value expressions: expr: _col1 type: bigint - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1035950223/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2063466051/10004 Union Select Operator expressions: @@ -166,6 +168,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery2:unionsrc-subquery1-subquery2:s2 + TableScan + alias: s2 Select Operator expressions: expr: key @@ -184,6 +188,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:unionsrc-subquery2:s3 + TableScan + alias: s3 Select Operator expressions: expr: key @@ -210,7 +216,7 @@ select s3.key as key, s3.value as value from src1 s3) unionsrc group by unionsrc.key Input: default/src Input: default/src1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/878593490/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1243316410/10000 20 128 2 146 2 Index: ql/src/test/results/clientpositive/groupby2_noskew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby2_noskew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby2_noskew.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -86,7 +88,7 @@ Output: default/dest_g2 query: SELECT dest_g2.* FROM dest_g2 Input: default/dest_g2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1336300297/10000 +Output: 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2078044703/10000 0 1 00.0 1 71 116414.0 2 69 225571.0 Index: ql/src/test/results/clientpositive/groupby5_map_skew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby5_map_skew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby5_map_skew.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -70,5 +72,5 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/949792340/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2100680851/10000 130091 Index: ql/src/test/results/clientpositive/input13.q.out =================================================================== --- ql/src/test/results/clientpositive/input13.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input13.q.out (working copy) @@ -26,6 +26,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(100)) @@ -122,10 +124,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/2103139823/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/670802827/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/210340643/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1557476151/10007 Reduce Output Operator sort order: Map-reduce partition columns: @@ -164,10 +166,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/2103139823/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/670802827/10002 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/210340643/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1557476151/10008 Reduce Output Operator sort order: Map-reduce partition columns: @@ -206,10 +208,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/2103139823/10004 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/670802827/10004 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/210340643/10009 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1557476151/10009 Reduce Output Operator sort order: Map-reduce partition columns: @@ -249,10 +251,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/2103139823/10006 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/670802827/10006 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/210340643/10010 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1557476151/10010 Reduce Output Operator sort order: Map-reduce partition columns: @@ -290,7 +292,7 @@ Output: ../build/ql/test/data/warehouse/dest4.out query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: 
file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1733324289/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/881401051/10000 86 val_86 27 val_27 98 val_98 @@ -377,7 +379,7 @@ 97 val_97 query: SELECT dest2.* FROM dest2 Input: default/dest2 -Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1779971956/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2139886551/10000 165 val_165 193 val_193 150 val_150 @@ -485,7 +487,7 @@ 169 val_169 query: SELECT dest3.* FROM dest3 Input: default/dest3/ds=2008-04-08/hr=12 -Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1888647376/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1615208828/10000 238 2008-04-08 12 255 2008-04-08 12 278 2008-04-08 12 Index: ql/src/test/results/clientpositive/udf5.q.out =================================================================== --- ql/src/test/results/clientpositive/udf5.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf5.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: dest1 + TableScan + alias: dest1 Select Operator expressions: expr: from_unixtime(1226446340) @@ -49,5 +51,5 @@ query: SELECT from_unixtime(1226446340), to_date(from_unixtime(1226446340)), day('2008-11-01'), month('2008-11-01'), year('2008-11-01'), day('2008-11-01 15:32:20'), month('2008-11-01 15:32:20'), year('2008-11-01 15:32:20') FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1374597747/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1723928135/10000 2008-11-11 15:32:20 2008-11-11 1 11 2008 1 11 2008 Index: ql/src/test/results/clientpositive/join31.q.out =================================================================== --- ql/src/test/results/clientpositive/join31.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join31.q.out (working copy) @@ -24,6 +24,8 @@ Map Reduce Alias -> Map Operator Tree: subq2:y + TableScan + alias: y Select Operator expressions: expr: key @@ -73,7 +75,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/962829162/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/733202123/10002 Common Join Operator condition map: Inner Join 0 to 1 @@ -94,11 +96,11 @@ Local Work: Map Reduce Local Work Alias -> Map Local Tables: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/962829162/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/733202123/10004 Fetch Operator limit: -1 Alias -> Map Local Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/962829162/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/733202123/10004 Common Join Operator condition map: Inner Join 0 to 1 @@ -120,7 +122,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/962829162/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/733202123/10003 Select Operator expressions: expr: _col0 @@ -197,6 +199,8 @@ Map Reduce Alias -> Map Operator Tree: subq1:x + TableScan + alias: x Select Operator expressions: expr: key @@ -254,7 +258,7 @@ Output: default/dest_j1 query: select * from dest_j1 x order by x.key Input: default/dest_j1 -Output: 
file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1509412937/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2103472034/10000 128 1 146 1 150 1 Index: ql/src/test/results/clientpositive/groupby_map_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_map_ppr.q.out (revision 0) +++ ql/src/test/results/clientpositive/groupby_map_ppr.q.out (revision 0) @@ -0,0 +1,208 @@ +query: CREATE TABLE dest1(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE +query: EXPLAIN EXTENDED +FROM srcpart src +INSERT OVERWRITE TABLE dest1 +SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) +WHERE src.ds = '2008-04-08' +GROUP BY substr(src.key,1,1) +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL src) ds) '2008-04-08')) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + TableScan + alias: src + Filter Operator + isSamplingPred: false + predicate: + expr: (ds = '2008-04-08') + type: boolean + Filter Operator + isSamplingPred: false + predicate: + expr: (ds = '2008-04-08') + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: key, value + Group By Operator + aggregations: + expr: count(DISTINCT substr(value, 5)) + expr: sum(substr(value, 5)) + keys: + expr: substr(key, 1, 1) + type: string + expr: substr(value, 5) + type: string + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + expr: _col1 + type: string + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col2 + type: bigint + expr: _col3 + type: double + Needs Tagging: false + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + Reduce Operator Tree: + Group By Operator + aggregations: + expr: count(DISTINCT KEY._col1) + expr: sum(VALUE._col1) + keys: + expr: KEY._col0 + type: string + mode: mergepartial + outputColumnNames: _col0, _col1, _col2 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: bigint + expr: concat(_col0, UDFToString(_col2)) + type: string + outputColumnNames: _col0, _col1, _col2 + Select Operator + expressions: + expr: _col0 + type: string + expr: UDFToInteger(_col1) + type: int + expr: _col2 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/972766884/10000 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name dest1 + columns.types string:int:string + serialization.ddl struct dest1 { string key, i32 c1, string c2} + serialization.format 1 + columns key,c1,c2 + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: true + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/972766884/10000 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name dest1 + columns.types string:int:string + serialization.ddl struct dest1 { string key, i32 c1, string c2} + serialization.format 1 + columns key,c1,c2 + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: dest1 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/972766884/10001 + + +query: FROM srcpart src +INSERT OVERWRITE TABLE dest1 +SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) +WHERE src.ds = 
'2008-04-08' +GROUP BY substr(src.key,1,1) +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/srcpart/ds=2008-04-08/hr=12 +Output: default/dest1 +query: SELECT dest1.* FROM dest1 +Input: default/dest1 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/161443012/10000 +0 1 00.0 +1 71 132828.0 +2 69 251142.0 +3 62 364008.0 +4 74 4105526.0 +5 6 5794.0 +6 5 6796.0 +7 6 71470.0 +8 8 81524.0 +9 7 92094.0 Index: ql/src/test/results/clientpositive/join16.q.out =================================================================== --- ql/src/test/results/clientpositive/join16.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join16.q.out (working copy) @@ -11,6 +11,8 @@ Map Reduce Alias -> Map Operator Tree: subq:a + TableScan + alias: a Filter Operator predicate: expr: ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) > UDFToDouble(20))) @@ -47,6 +49,8 @@ expr: _col0 type: string tab + TableScan + alias: tab Filter Operator predicate: expr: (UDFToDouble(value) < UDFToDouble(200)) Index: ql/src/test/results/clientpositive/nullgroup2.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/nullgroup2.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) > UDFToDouble(9999)) @@ -65,7 +67,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/852082885/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/81167502/10002 Reduce Output Operator key expressions: expr: _col0 @@ -108,7 +110,7 @@ query: select x.key, count(1) from src x where x.key > 9999 group by x.key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1261853216/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/45709324/10000 query: explain select x.key, count(1) from src x where x.key > 9999 group by x.key ABSTRACT SYNTAX TREE: @@ -123,6 +125,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) > UDFToDouble(9999)) @@ -186,7 +190,7 @@ query: select x.key, count(1) from src x where x.key > 9999 group by x.key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1355499673/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/996927786/10000 query: explain select x.key, count(1) from src x where x.key > 9999 group by x.key ABSTRACT SYNTAX TREE: @@ -202,6 +206,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) > UDFToDouble(9999)) @@ -246,7 +252,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/905962313/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/912483231/10002 Reduce Output Operator key expressions: expr: _col0 @@ -289,7 +295,7 @@ query: select x.key, count(1) from src x where x.key > 9999 group by x.key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1233113127/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/75894632/10000 query: explain select x.key, count(1) from src x where x.key > 9999 group by x.key 
ABSTRACT SYNTAX TREE: @@ -304,6 +310,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(key) > UDFToDouble(9999)) @@ -359,4 +367,4 @@ query: select x.key, count(1) from src x where x.key > 9999 group by x.key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/808851154/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1160900647/10000 Index: ql/src/test/results/clientpositive/groupby5.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby5.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby5.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -55,7 +57,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1116257578/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1836517958/10002 Reduce Output Operator key expressions: expr: _col0 @@ -119,7 +121,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1251278214/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/472765000/10000 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/union8.q.out =================================================================== --- ql/src/test/results/clientpositive/union8.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union8.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery1:unionsrc-subquery1-subquery1:s1 + TableScan + alias: s1 Select Operator expressions: expr: key @@ -38,6 +40,8 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat null-subquery1-subquery2:unionsrc-subquery1-subquery2:s2 + TableScan + alias: s2 Select Operator expressions: expr: key @@ -60,6 +64,8 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat null-subquery2:unionsrc-subquery2:s3 + TableScan + alias: s3 Select Operator expressions: expr: key @@ -91,7 +97,7 @@ select s2.key as key, s2.value as value from src s2 UNION ALL select s3.key as key, s3.value as value from src s3) unionsrc Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/397719358/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/291748192/10000 238 val_238 238 val_238 238 val_238 Index: ql/src/test/results/clientpositive/mapreduce3.q.out =================================================================== --- ql/src/test/results/clientpositive/mapreduce3.q.out (revision 801363) +++ ql/src/test/results/clientpositive/mapreduce3.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -92,7 +94,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1051296318/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1654922670/10000 0 0 0 val_0 0 0 0 val_0 0 0 0 val_0 Index: 
ql/src/test/results/clientpositive/order2.q.out =================================================================== --- ql/src/test/results/clientpositive/order2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/order2.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: subq:x + TableScan + alias: x Select Operator expressions: expr: key @@ -62,7 +64,7 @@ (SELECT x.* FROM SRC x ORDER BY key limit 10) subq where subq.key < 10 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/509337607/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/899139587/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/input11_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input11_limit.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input11_limit.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(100)) @@ -74,7 +76,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/469291801/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1979379952/10000 86 val_86 27 val_27 98 val_98 Index: ql/src/test/results/clientpositive/groupby4_map_skew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby4_map_skew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby4_map_skew.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator Group By Operator aggregations: @@ -66,5 +68,5 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1894553109/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1299647680/10000 500 Index: ql/src/test/results/clientpositive/ppd_join.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_join.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_join.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: src2:src + TableScan + alias: src Filter Operator predicate: expr: ((key > '2') and (key <> '4')) @@ -48,6 +50,8 @@ expr: _col1 type: string src1:src + TableScan + alias: src Filter Operator predicate: expr: (((key > '1') and (key < '400')) and ((key > '20') and ((value < 'val_50') or (key > '2')))) @@ -120,7 +124,7 @@ ON src1.c1 = src2.c3 AND src1.c1 < '400' WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4') Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/694942086/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1128518264/10000 200 val_200 200 val_200 200 val_200 Index: ql/src/test/results/clientpositive/input22.q.out =================================================================== --- ql/src/test/results/clientpositive/input22.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input22.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: a:input4 + TableScan + alias: 
input4 Select Operator expressions: expr: key @@ -56,7 +58,7 @@ FROM INPUT4) a ORDER BY KEY2 LIMIT 10 Input: default/input4 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/945658600/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1382133902/10000 0 0 0 Index: ql/src/test/results/clientpositive/groupby7_noskew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby7_noskew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby7_noskew.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -95,7 +97,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1433385453/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/168306151/10004 Reduce Output Operator key expressions: expr: key @@ -159,7 +161,7 @@ Output: default/dest2 query: SELECT DEST1.* FROM DEST1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/341712473/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/27584412/10000 0 0.0 10 10.0 100 200.0 @@ -471,7 +473,7 @@ 98 196.0 query: SELECT DEST2.* FROM DEST2 Input: default/dest2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/814627744/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/104857768/10000 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/sample4.q.out =================================================================== --- ql/src/test/results/clientpositive/sample4.q.out (revision 801363) +++ ql/src/test/results/clientpositive/sample4.q.out (working copy) @@ -17,11 +17,15 @@ Map Reduce Alias -> Map Operator Tree: s + TableScan + alias: s Filter Operator + isSamplingPred: false predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean Filter Operator + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean @@ -35,7 +39,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1341797982/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1676597162/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -49,14 +53,14 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -72,7 +76,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket @@ -82,11 +86,11 @@ Move Operator files: hdfs directory: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1341797982/10002 - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1115367091/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1676597162/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/438102255/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1341797982/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1676597162/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -100,9 +104,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1341797982/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1676597162/10002 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1341797982/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1676597162/10002 Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -117,7 +121,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Reduce Operator Tree: @@ -125,7 +129,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1115367091/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/438102255/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -138,7 +142,7 @@ bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 @@ -147,7 +151,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1115367091/10000 + source: 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/438102255/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -161,10 +165,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1115367091/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/438102255/10001 query: INSERT OVERWRITE TABLE dest1 SELECT s.* @@ -173,7 +177,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1410483320/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1802917889/10000 474 val_475 62 val_63 468 val_469 Index: ql/src/test/results/clientpositive/join25.q.out =================================================================== --- ql/src/test/results/clientpositive/join25.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join25.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: y + TableScan + alias: y Common Join Operator condition map: Inner Join 0 to 1 @@ -71,6 +73,8 @@ limit: -1 Alias -> Map Local Operator Tree: x + TableScan + alias: x Common Join Operator condition map: Inner Join 0 to 1 @@ -124,10 +128,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/235265877/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1885483118/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/337659152/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1821621944/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -171,7 +175,7 @@ Output: default/dest_j1 query: select * from dest_j1 x order by x.key Input: default/dest_j1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/2088477644/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/123130192/10000 66 val_66 val_66 98 val_98 val_98 98 val_98 val_98 Index: ql/src/test/results/clientpositive/groupby6_map.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby6_map.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby6_map.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: value @@ -72,7 +74,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/773047983/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1366391771/10000 0 1 2 Index: ql/src/test/results/clientpositive/udf_like.q.out =================================================================== --- 
ql/src/test/results/clientpositive/udf_like.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_like.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(86)) @@ -67,5 +69,5 @@ '%_' LIKE '\%\_', 'ab' LIKE '\%\_', 'ab' LIKE '_a%', 'ab' LIKE 'a' FROM src WHERE src.key = 86 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1035005626/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/380805800/10000 true false true true false false false false true false false false Index: ql/src/test/results/clientpositive/join5.q.out =================================================================== --- ql/src/test/results/clientpositive/join5.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join5.q.out (working copy) @@ -25,6 +25,8 @@ Map Reduce Alias -> Map Operator Tree: c:a:src1 + TableScan + alias: src1 Filter Operator predicate: expr: ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20))) @@ -55,6 +57,8 @@ expr: _col1 type: string c:b:src2 + TableScan + alias: src2 Filter Operator predicate: expr: ((UDFToDouble(key) > UDFToDouble(15)) and (UDFToDouble(key) < UDFToDouble(25))) @@ -162,7 +166,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/156874986/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/771875621/10000 17 val_17 17 val_17 18 val_18 18 val_18 18 val_18 18 val_18 Index: ql/src/test/results/clientpositive/ppd_join2.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_join2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_join2.q.out (working copy) @@ -22,6 +22,8 @@ Map Reduce Alias -> Map Operator Tree: src2:src + TableScan + alias: src Filter Operator predicate: expr: ((key <> '305') and (key <> '14')) @@ -52,6 +54,8 @@ expr: _col1 type: string src1:src + TableScan + alias: src Filter Operator predicate: expr: (((key <> '302') and (key < '400')) and ((key <> '311') and ((value <> 'val_50') or (key > '1')))) @@ -123,6 +127,8 @@ expr: _col1 type: string src3:src + TableScan + alias: src Filter Operator predicate: expr: ((key <> '306') and (sqrt(UDFToDouble(key)) <> UDFToDouble(13))) @@ -192,7 +198,7 @@ ON src1.c2 = src3.c6 WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13) Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/637459314/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1783863777/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/ppd_gby.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_gby.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_gby.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src1:src + TableScan + alias: src Filter Operator predicate: expr: ((value > 'val_10') and (value > 'val_200')) @@ -92,7 +94,7 @@ (SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1 WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400') Input: default/src 
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/485117888/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1347130772/10000 val_201 val_202 val_203 Index: ql/src/test/results/clientpositive/input_testxpath4.q.out =================================================================== --- ql/src/test/results/clientpositive/input_testxpath4.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input_testxpath4.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: src_thrift + TableScan + alias: src_thrift Filter Operator predicate: expr: ((mstringstring['key_9'] is not null and lintstring.myint is not null) and lintstring is not null) @@ -45,7 +47,7 @@ OR lintstring.myint IS NOT NULL OR lintstring IS NOT NULL Input: default/src_thrift -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/533384116/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/607559356/10000 NULL [0] NULL [1] NULL [4] @@ -74,6 +76,8 @@ Map Reduce Alias -> Map Operator Tree: src_thrift + TableScan + alias: src_thrift Filter Operator predicate: expr: ((mstringstring['key_9'] is not null and lintstring.myint is not null) and lintstring is not null) @@ -107,7 +111,7 @@ OR lintstring.myint IS NOT NULL OR lintstring IS NOT NULL Input: default/src_thrift -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1269187205/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/364349054/10000 NULL [0] NULL [1] NULL [4] Index: ql/src/test/results/clientpositive/udf_case_thrift.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_case_thrift.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_case_thrift.q.out (working copy) @@ -27,6 +27,8 @@ Map Reduce Alias -> Map Operator Tree: src_thrift + TableScan + alias: src_thrift Select Operator expressions: expr: CASE (lint[0]) WHEN (0) THEN ((lint[0] + 1)) WHEN (1) THEN ((lint[0] + 2)) WHEN (2) THEN (100) ELSE (5) END @@ -66,7 +68,7 @@ END)[0] FROM src_thrift LIMIT 3 Input: default/src_thrift -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/861207644/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/87841953/10000 1 zero 0 3 10 is ten NULL 100 default NULL Index: ql/src/test/results/clientpositive/union10.q.out =================================================================== --- ql/src/test/results/clientpositive/union10.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union10.q.out (working copy) @@ -31,6 +31,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery1:unionsrc-subquery1-subquery1:s1 + TableScan + alias: s1 Select Operator Group By Operator aggregations: @@ -66,7 +68,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1622224278/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1115593242/10002 Union Select Operator expressions: @@ -90,7 +92,7 @@ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1622224278/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1115593242/10004 Union Select Operator expressions: @@ -114,7 +116,7 @@ output 
format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1622224278/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1115593242/10005 Union Select Operator expressions: @@ -145,10 +147,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1160998678/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1365326225/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1622224278/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1115593242/10003 Reduce Output Operator sort order: Map-reduce partition columns: @@ -185,6 +187,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery2:unionsrc-subquery1-subquery2:s2 + TableScan + alias: s2 Select Operator Group By Operator aggregations: @@ -221,6 +225,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:unionsrc-subquery2:s3 + TableScan + alias: s3 Select Operator Group By Operator aggregations: @@ -264,7 +270,7 @@ Output: default/tmptable query: select * from tmptable x sort by x.key Input: default/tmptable -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1108373371/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/994551833/10000 tst1 500 tst2 500 tst3 500 Index: ql/src/test/results/clientpositive/outer_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/outer_join_ppr.q.out (revision 0) +++ ql/src/test/results/clientpositive/outer_join_ppr.q.out (revision 0) @@ -0,0 +1,430 @@ +query: EXPLAIN EXTENDED + FROM + src a + FULL OUTER JOIN + srcpart b + ON (a.key = b.key AND b.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_FULLOUTERJOIN (TOK_TABREF src a) (TOK_TABREF srcpart b) (AND (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (= (. (TOK_TABLE_OR_COL b) ds) '2008-04-08')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. 
(TOK_TABLE_OR_COL b) key) 25))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + b + TableScan + alias: b + Filter Operator + isSamplingPred: false + predicate: + expr: (ds = '2008-04-08') + type: boolean + Filter Operator + isSamplingPred: false + predicate: + expr: (ds = '2008-04-08') + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + a + TableScan + alias: a + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + Needs Tagging: true + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name src + columns.types string:string + serialization.ddl struct src { string key, string value} + serialization.format 1 + columns key,value + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat 
org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: src + Reduce Operator Tree: + Join Operator + condition map: + Outer Join 0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} + outputColumnNames: _col0, _col1, _col2, _col3 + Filter Operator + isSamplingPred: false + predicate: + expr: ((((UDFToDouble(_col0) > UDFToDouble(10)) and (UDFToDouble(_col0) < UDFToDouble(20))) and (UDFToDouble(_col2) > UDFToDouble(15))) and (UDFToDouble(_col2) < UDFToDouble(25))) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/520891215/10001 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + serialization.format 1 + columns.types string:string:string:string + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +query: FROM + src a + FULL OUTER JOIN + srcpart b + ON (a.key = b.key AND b.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/srcpart/ds=2008-04-08/hr=12 +Input: default/src +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/630976667/10000 +17 val_17 17 val_17 +17 val_17 17 val_17 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +19 val_19 19 val_19 +19 val_19 19 val_19 +query: EXPLAIN EXTENDED + FROM + src a + FULL OUTER JOIN + srcpart b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08' +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_FULLOUTERJOIN (TOK_TABREF src a) (TOK_TABREF srcpart b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. (TOK_TABLE_OR_COL b) key) 25)) (= (. 
(TOK_TABLE_OR_COL b) ds) '2008-04-08'))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + b + TableScan + alias: b + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + a + TableScan + alias: a + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + Needs Tagging: true + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 + Partition + partition values: + ds 2008-04-09 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns 
key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + Partition + partition values: + ds 2008-04-09 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name src + columns.types string:string + serialization.ddl struct src { string key, string value} + serialization.format 1 + columns key,value + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: src + Reduce Operator Tree: + Join Operator + condition map: + Outer Join 0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Filter Operator + isSamplingPred: false + predicate: + expr: (((((UDFToDouble(_col0) > UDFToDouble(10)) and (UDFToDouble(_col0) < UDFToDouble(20))) and (UDFToDouble(_col2) > UDFToDouble(15))) and (UDFToDouble(_col2) < UDFToDouble(25))) and (_col4 = '2008-04-08')) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1312771848/10001 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + serialization.format 1 + columns.types string:string:string:string + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +query: FROM + src a + FULL OUTER JOIN + srcpart b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08' +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: 
default/srcpart/ds=2008-04-08/hr=12 +Input: default/srcpart/ds=2008-04-09/hr=11 +Input: default/srcpart/ds=2008-04-09/hr=12 +Input: default/src +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2067779225/10000 +17 val_17 17 val_17 +17 val_17 17 val_17 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +19 val_19 19 val_19 +19 val_19 19 val_19 Index: ql/src/test/results/clientpositive/groupby3_map_skew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby3_map_skew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby3_map_skew.q.out (working copy) @@ -24,6 +24,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: value @@ -97,7 +99,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/emil/hive1/hive1/build/ql/tmp/738212968/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/979355284/10002 Reduce Output Operator sort order: tag: -1 @@ -211,6 +213,6 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/emil/hive1/hive1/build/ql/tmp/528887072/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/293386936/10000 130091.0 260.182 256.10355987055016 98.0 0.0 142.9268095075238 143.06995106518906 20428.072876 20469.01089779559 query: DROP TABLE dest1 Index: ql/src/test/results/clientpositive/union18.q.out =================================================================== --- ql/src/test/results/clientpositive/union18.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union18.q.out (working copy) @@ -32,6 +32,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unionsrc-subquery1:s1 + TableScan + alias: s1 Select Operator Group By Operator aggregations: @@ -67,7 +69,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1650219158/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/970285192/10004 Union Select Operator expressions: @@ -101,7 +103,7 @@ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest2 - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1650219158/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/970285192/10007 Union Select Operator expressions: @@ -142,10 +144,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1809064941/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1625764510/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1650219158/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/970285192/10005 Reduce Output Operator sort order: Map-reduce partition columns: @@ -184,10 +186,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1809064941/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1625764510/10002 Map Reduce Alias -> Map Operator Tree: - 
file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1650219158/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/970285192/10006 Reduce Output Operator sort order: Map-reduce partition columns: @@ -226,6 +228,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:unionsrc-subquery2:s2 + TableScan + alias: s2 Select Operator expressions: expr: key @@ -251,7 +255,7 @@ Output: default/dest2 query: SELECT DEST1.* FROM DEST1 SORT BY DEST1.key, DEST1.value Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/83349738/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1040816000/10000 0 val_0 0 val_0 0 val_0 @@ -755,7 +759,7 @@ tst1 500 query: SELECT DEST2.* FROM DEST2 SORT BY DEST2.key, DEST2.val1, DEST2.val2 Input: default/dest2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/225770887/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/491493524/10000 0 val_0 val_0 0 val_0 val_0 0 val_0 val_0 Index: ql/src/test/results/clientpositive/join11.q.out =================================================================== --- ql/src/test/results/clientpositive/join11.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join11.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: src2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -37,6 +39,8 @@ expr: _col1 type: string src1:src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(100)) @@ -96,7 +100,7 @@ (SELECT src.key as c3, src.value as c4 from src) src2 ON src1.c1 = src2.c3 AND src1.c1 < 100 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/542706340/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1302752228/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/input31.q.out =================================================================== --- ql/src/test/results/clientpositive/input31.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input31.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: srcbucket + TableScan + alias: srcbucket Select Operator Group By Operator aggregations: @@ -71,7 +73,7 @@ Output: default/tst_dest31 query: select * from tst_dest31 Input: default/tst_dest31 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1184857393/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2034201146/10000 493 query: drop table tst_dest31 query: drop table dest31 Index: ql/src/test/results/clientpositive/udf8.q.out =================================================================== --- ql/src/test/results/clientpositive/udf8.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf8.q.out (working copy) @@ -19,6 +19,8 @@ Map Reduce Alias -> Map Operator Tree: dest1 + TableScan + alias: dest1 Select Operator expressions: expr: c1 @@ -72,5 +74,5 @@ query: SELECT avg(c1), sum(c1), count(c1) FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1105626968/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/562771433/10000 1.0 1.0 1 Index: ql/src/test/results/clientpositive/join34.q.out =================================================================== --- 
ql/src/test/results/clientpositive/join34.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join34.q.out (working copy) @@ -22,11 +22,15 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:subq1-subquery1:x + TableScan + alias: x Filter Operator + isSamplingPred: false predicate: expr: (UDFToDouble(key) < UDFToDouble(20)) type: boolean Filter Operator + isSamplingPred: false predicate: expr: (UDFToDouble(key) < UDFToDouble(20)) type: boolean @@ -70,7 +74,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/941793943/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/890087702/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -84,15 +88,19 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 null-subquery2:subq1-subquery2:x1 + TableScan + alias: x1 Filter Operator + isSamplingPred: false predicate: expr: (UDFToDouble(key) > UDFToDouble(100)) type: boolean Filter Operator + isSamplingPred: false predicate: expr: (UDFToDouble(key) > UDFToDouble(100)) type: boolean @@ -136,7 +144,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/941793943/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/890087702/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -150,7 +158,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 Local Work: @@ -161,6 +169,8 @@ limit: -1 Alias -> Map Local Operator Tree: x + TableScan + alias: x Common Join Operator condition map: Inner Join 0 to 1 @@ -193,7 +203,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/941793943/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/890087702/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -207,14 +217,14 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -229,7 +239,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src @@ -239,11 +249,11 @@ Move Operator files: hdfs directory: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/941793943/10002 - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1982701611/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/890087702/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1815358435/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/941793943/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/890087702/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -259,9 +269,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/941793943/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/890087702/10002 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/941793943/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/890087702/10002 Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -276,7 +286,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 Reduce Operator Tree: @@ -284,7 +294,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1982701611/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1815358435/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -297,7 +307,7 @@ bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 @@ -306,7 +316,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1982701611/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1815358435/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -320,10 +330,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1982701611/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1815358435/10001 query: INSERT OVERWRITE TABLE dest_j1 @@ -339,7 +349,7 @@ Output: default/dest_j1 query: select * from dest_j1 x order by x.key Input: default/dest_j1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1728235076/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/847132688/10000 128 val_128 128 val_128 128 val_128 Index: ql/src/test/results/clientpositive/udf_json.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_json.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_json.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: src_json + TableScan + alias: src_json Select Operator expressions: expr: get_json_object(json, '$.owner') @@ -35,33 +37,33 @@ query: SELECT get_json_object(src_json.json, '$') FROM src_json Input: default/src_json -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2002061679/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1640975469/10000 {"store":{"fruit":[{"weight":8,"type":"apple"},{"weight":9,"type":"pear"}],"book":[{"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95},{"author":"Herman Melville","category":"fiction","title":"Moby Dick","price":8.99,"isbn":"0-553-21311-3"},{"author":"J. R. R. Tolkien","category":"fiction","title":"The Lord of the Rings","price":22.99,"reader":[{"name":"bob","age":25},{"name":"jack","age":26}],"isbn":"0-395-19395-8"}],"basket":[[1,2,{"b":"y","a":"x"}],[3,4],[5,6]],"bicycle":{"price":19.95,"color":"red"}},"email":"amy@only_for_json_udf_test.net","owner":"amy"} query: SELECT get_json_object(src_json.json, '$.owner'), get_json_object(src_json.json, '$.store') FROM src_json Input: default/src_json -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/374886900/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1729246433/10000 amy {"fruit":[{"weight":8,"type":"apple"},{"weight":9,"type":"pear"}],"book":[{"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95},{"author":"Herman Melville","category":"fiction","title":"Moby Dick","price":8.99,"isbn":"0-553-21311-3"},{"author":"J. R. R. 
Tolkien","category":"fiction","title":"The Lord of the Rings","price":22.99,"reader":[{"name":"bob","age":25},{"name":"jack","age":26}],"isbn":"0-395-19395-8"}],"basket":[[1,2,{"b":"y","a":"x"}],[3,4],[5,6]],"bicycle":{"price":19.95,"color":"red"}} query: SELECT get_json_object(src_json.json, '$.store.bicycle'), get_json_object(src_json.json, '$.store.book') FROM src_json Input: default/src_json -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1994650922/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/371903655/10000 {"price":19.95,"color":"red"} [{"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95},{"author":"Herman Melville","category":"fiction","title":"Moby Dick","price":8.99,"isbn":"0-553-21311-3"},{"author":"J. R. R. Tolkien","category":"fiction","title":"The Lord of the Rings","price":22.99,"reader":[{"name":"bob","age":25},{"name":"jack","age":26}],"isbn":"0-395-19395-8"}] query: SELECT get_json_object(src_json.json, '$.store.book[0]'), get_json_object(src_json.json, '$.store.book[*]') FROM src_json Input: default/src_json -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/854690811/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/238965518/10000 {"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95} [{"author":"Nigel Rees","category":"reference","title":"Sayings of the Century","price":8.95},{"author":"Herman Melville","category":"fiction","title":"Moby Dick","price":8.99,"isbn":"0-553-21311-3"},{"author":"J. R. R. Tolkien","category":"fiction","title":"The Lord of the Rings","price":22.99,"reader":[{"name":"bob","age":25},{"name":"jack","age":26}],"isbn":"0-395-19395-8"}] query: SELECT get_json_object(src_json.json, '$.store.book[0].category'), get_json_object(src_json.json, '$.store.book[*].category'), get_json_object(src_json.json, '$.store.book[*].isbn'), get_json_object(src_json.json, '$.store.book[*].reader') FROM src_json Input: default/src_json -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1666673482/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1747766822/10000 reference ["reference","fiction","fiction"] ["0-553-21311-3","0-395-19395-8"] [{"name":"bob","age":25},{"name":"jack","age":26}] query: SELECT get_json_object(src_json.json, '$.store.book[*].reader[0].age'), get_json_object(src_json.json, '$.store.book[*].reader[*].age') FROM src_json Input: default/src_json -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/396895264/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1807289940/10000 25 [25,26] query: SELECT get_json_object(src_json.json, '$.store.basket[0][1]'), get_json_object(src_json.json, '$.store.basket[*]'), get_json_object(src_json.json, '$.store.basket[*][0]'), get_json_object(src_json.json, '$.store.basket[0][*]'), get_json_object(src_json.json, '$.store.basket[*][*]'), get_json_object(src_json.json, '$.store.basket[0][2].b'), get_json_object(src_json.json, '$.store.basket[0][*].b') FROM src_json Input: default/src_json -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/665338728/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1034799624/10000 2 [[1,2,{"b":"y","a":"x"}],[3,4],[5,6]] 1 [1,2,{"b":"y","a":"x"}] [1,2,{"b":"y","a":"x"},3,4,5,6] y ["y"] query: SELECT 
get_json_object(src_json.json, '$.non_exist_key'), get_json_object(src_json.json, '$..no_recursive'), get_json_object(src_json.json, '$.store.book[10]'), get_json_object(src_json.json, '$.store.book[0].non_exist_key'), get_json_object(src_json.json, '$.store.basket[*].non_exist_key'), get_json_object(src_json.json, '$.store.basket[0][*].non_exist_key') FROM src_json Input: default/src_json -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1377080996/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/875672224/10000 NULL NULL NULL NULL NULL NULL Index: ql/src/test/results/clientpositive/join19.q.out =================================================================== --- ql/src/test/results/clientpositive/join19.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join19.q.out (working copy) @@ -69,6 +69,8 @@ Map Reduce Alias -> Map Operator Tree: t22:t2 + TableScan + alias: t2 Filter Operator predicate: expr: (predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL') @@ -97,6 +99,8 @@ expr: _col1 type: string t33:t3 + TableScan + alias: t3 Filter Operator predicate: expr: (predicate = 'http://www.ontosearch.com/2007/12/ontosofa-ns#_from') @@ -125,6 +129,8 @@ expr: _col0 type: string t11:t1 + TableScan + alias: t1 Filter Operator predicate: expr: ((predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') and (object = 'http://ontos/OntosMiner/Common.English/ontology#Citation')) @@ -188,6 +194,8 @@ expr: _col2 type: string t55:t5 + TableScan + alias: t5 Filter Operator predicate: expr: (predicate = 'http://www.ontosearch.com/2007/12/ontosofa-ns#_to') @@ -216,6 +224,8 @@ expr: _col1 type: string t44:t4 + TableScan + alias: t4 Filter Operator predicate: expr: ((predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') and (object = 'http://ontos/OntosMiner/Common.English/ontology#Author')) @@ -259,6 +269,8 @@ Map Reduce Alias -> Map Operator Tree: t66:t6 + TableScan + alias: t6 Filter Operator predicate: expr: (predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL') Index: ql/src/test/results/clientpositive/udf_reverse.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_reverse.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_reverse.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src1 + TableScan + alias: src1 Select Operator expressions: expr: reverse(value) @@ -33,10 +35,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1099092133/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2139233776/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1419805399/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1684174409/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -73,7 +75,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2026875465/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/38069188/10000 832_lav 113_lav @@ -107,6 +109,6 @@ query: LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE dest1 query: SELECT count(1) FROM dest1 WHERE reverse(dest1.name) = _UTF-8 0xE993AEE982B5 
Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1752039615/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/484118/10000 1 query: DROP TABLE dest1 Index: ql/src/test/results/clientpositive/union3.q.out =================================================================== --- ql/src/test/results/clientpositive/union3.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union3.q.out (working copy) @@ -36,6 +36,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:a-subquery2:s2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -70,7 +72,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1900161365/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/831850354/10002 Union Select Operator expressions: @@ -83,7 +85,7 @@ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1900161365/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/831850354/10003 Union Select Operator expressions: @@ -96,7 +98,7 @@ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1900161365/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/831850354/10005 Union Select Operator expressions: @@ -109,7 +111,7 @@ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1900161365/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/831850354/10007 Union Select Operator expressions: @@ -127,6 +129,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery2:a-subquery1-subquery2:s2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -162,6 +166,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery1-subquery1:a-subquery1-subquery1-subquery1:s1:src + TableScan + alias: src Select Operator expressions: expr: key @@ -196,7 +202,7 @@ Stage: Stage-5 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1900161365/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/831850354/10004 Reduce Output Operator key expressions: expr: _col0 @@ -222,6 +228,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery1-subquery2:a-subquery1-subquery1-subquery2:s1:src + TableScan + alias: src Select Operator expressions: expr: key @@ -256,7 +264,7 @@ Stage: Stage-7 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1900161365/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/831850354/10006 Reduce Output Operator key expressions: expr: _col0 @@ -306,7 +314,7 @@ Output: default/union_out query: select * from union_out cluster by id Input: default/union_out -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/838109701/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/209669057/10000 1 2 3 Index: ql/src/test/results/clientpositive/nullgroup5.q.out 
=================================================================== --- ql/src/test/results/clientpositive/nullgroup5.q.out (revision 801363) +++ ql/src/test/results/clientpositive/nullgroup5.q.out (working copy) @@ -23,6 +23,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:u-subquery1:x + TableScan + alias: x Filter Operator predicate: expr: (ds = '2009-04-05') @@ -53,6 +55,8 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat null-subquery2:u-subquery2:y + TableScan + alias: y Filter Operator predicate: expr: (ds = '2009-04-09') @@ -95,7 +99,7 @@ select key, value from tstparttbl2 y where y.ds='2009-04-09' )u Input: default/tstparttbl2/ds=2009-04-09 -Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/build/ql/tmp/248088196/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2066174418/10000 238 val_238 86 val_86 311 val_311 Index: ql/src/test/results/clientpositive/input1_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input1_limit.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input1_limit.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(100)) @@ -88,7 +90,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1245617876/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1614539331/10004 Reduce Output Operator sort order: tag: -1 @@ -135,7 +137,7 @@ Output: default/dest2 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/62325349/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1722063757/10000 86 val_86 27 val_27 98 val_98 @@ -148,7 +150,7 @@ 57 val_57 query: SELECT dest2.* FROM dest2 Input: default/dest2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1944665734/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1675468164/10000 86 val_86 27 val_27 98 val_98 Index: ql/src/test/results/clientpositive/groupby8.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby8.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby8.q.out (working copy) @@ -19,6 +19,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Reduce Output Operator key expressions: expr: substr(value, 5) @@ -65,7 +67,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1989034170/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/419849241/10004 Reduce Output Operator key expressions: expr: _col0 @@ -123,7 +125,7 @@ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1989034170/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/419849241/10005 Reduce Output Operator key expressions: expr: _col0 @@ -187,7 +189,7 @@ Output: default/dest2 query: SELECT DEST1.* FROM DEST1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1596722289/10000 +Output: 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/371059848/10000 0 1 10 1 100 1 @@ -499,7 +501,7 @@ 98 1 query: SELECT DEST2.* FROM DEST2 Input: default/dest2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/721876966/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/907975599/10000 0 1 10 1 100 1 Index: ql/src/test/results/clientpositive/input_columnarserde.q.out =================================================================== --- ql/src/test/results/clientpositive/input_columnarserde.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input_columnarserde.q.out (working copy) @@ -20,6 +20,8 @@ Map Reduce Alias -> Map Operator Tree: src_thrift + TableScan + alias: src_thrift Select Operator expressions: expr: lint @@ -78,7 +80,7 @@ Output: default/input_columnarserde query: SELECT input_columnarserde.* FROM input_columnarserde DISTRIBUTE BY 1 Input: default/input_columnarserde -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/2034641093/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1695989715/10000 [0,0,0] ["0","0","0"] {"key_0":"value_0"} 1712634731 record_0 [1,2,3] ["10","100","1000"] {"key_1":"value_1"} 465985200 record_1 [2,4,6] ["20","200","2000"] {"key_2":"value_2"} -751827638 record_2 @@ -92,7 +94,7 @@ null null {} 0 NULL query: SELECT input_columnarserde.a[0], input_columnarserde.b[0], input_columnarserde.c['key2'], input_columnarserde.d, input_columnarserde.e FROM input_columnarserde DISTRIBUTE BY 1 Input: default/input_columnarserde -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/334887658/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1755799278/10000 0 0 NULL 1712634731 record_0 1 10 NULL 465985200 record_1 2 20 NULL -751827638 record_2 Index: ql/src/test/results/clientpositive/case_sensitivity.q.out =================================================================== --- ql/src/test/results/clientpositive/case_sensitivity.q.out (revision 801363) +++ ql/src/test/results/clientpositive/case_sensitivity.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src_thrift + TableScan + alias: src_thrift Filter Operator predicate: expr: (lint[0] > 0) @@ -45,10 +47,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2103484307/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/150742864/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2009660724/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2122837325/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -88,7 +90,7 @@ Output: default/dest1 query: SELECT DEST1.* FROM Dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1564334315/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1814455060/10000 2 1 4 8 6 27 Index: ql/src/test/results/clientpositive/groupby10.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby10.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby10.q.out (working copy) @@ -24,6 +24,8 @@ Map Reduce Alias -> Map Operator Tree: input + TableScan + alias: input Reduce Output Operator key expressions: 
expr: substr(value, 5) @@ -72,7 +74,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1833926934/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/458423382/10004 Reduce Output Operator key expressions: expr: _col0 @@ -137,7 +139,7 @@ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1833926934/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/458423382/10005 Reduce Output Operator key expressions: expr: _col0 @@ -208,7 +210,7 @@ Output: default/dest2 query: SELECT * from dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1082380771/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1557071213/10000 27 1 1 66 1 1 86 1 1 @@ -231,7 +233,7 @@ 484 1 1 query: SELECT * from dest2 Input: default/dest2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1659136589/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1527114002/10000 27 27 27 66 66 66 86 86 86 Index: ql/src/test/results/clientpositive/mapreduce6.q.out =================================================================== --- ql/src/test/results/clientpositive/mapreduce6.q.out (revision 801363) +++ ql/src/test/results/clientpositive/mapreduce6.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -92,7 +94,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/281365680/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/784520367/10000 490 49 0 val_490 491 49 1 val_491 492 49 2 val_492 Index: ql/src/test/results/clientpositive/udf_if.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_if.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_if.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: if(true, 1, 2) @@ -54,7 +56,7 @@ IF(IF(TRUE, NULL, FALSE), 1, 2) AS COL6 FROM src LIMIT 1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1917296468/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1117188098/10000 1 1 1 1 NULL 2 query: -- Type conversions EXPLAIN @@ -75,6 +77,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: if(true, UDFToShort(128), UDFToByte(1)) @@ -105,5 +109,5 @@ IF(FALSE, 'ABC', 12.3) AS COL4 FROM src LIMIT 1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/903772370/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/97050959/10000 128 1.1 ABC 12.3 Index: ql/src/test/results/clientpositive/groupby1_noskew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby1_noskew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby1_noskew.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -80,7 +82,7 @@ Output: default/dest_g1 
query: SELECT dest_g1.* FROM dest_g1 Input: default/dest_g1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1408602511/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1062669499/10000 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/groupby2_map_skew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby2_map_skew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby2_map_skew.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -71,7 +73,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/340084845/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1895770841/10002 Reduce Output Operator key expressions: expr: _col0 @@ -140,7 +142,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1380797269/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/668586222/10000 0 1 00.0 1 71 116414.0 2 69 225571.0 Index: ql/src/test/results/clientpositive/ppd_outer_join1.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_outer_join1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_outer_join1.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: b + TableScan + alias: b Reduce Output Operator key expressions: expr: key @@ -33,6 +35,8 @@ expr: value type: string a + TableScan + alias: a Filter Operator predicate: expr: ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20))) @@ -94,7 +98,7 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/1086203746/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1157143291/10000 17 val_17 17 val_17 18 val_18 18 val_18 18 val_18 18 val_18 Index: ql/src/test/results/clientpositive/join20.q.out =================================================================== --- ql/src/test/results/clientpositive/join20.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join20.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src2 + TableScan + alias: src2 Reduce Output Operator key expressions: expr: key @@ -29,6 +31,8 @@ expr: value type: string src3 + TableScan + alias: src3 Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(20)) @@ -52,6 +56,8 @@ expr: value type: string src1 + TableScan + alias: src1 Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(10)) @@ -109,7 +115,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1580643489/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1527809790/10002 Reduce Output Operator key expressions: expr: _col0 @@ -156,7 +162,7 @@ query: SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10) RIGHT OUTER JOIN src src3 ON (src1.key = src3.key AND src3.key < 20) SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value Input: default/src -Output: 
file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/2102648100/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/857127004/10000 NULL NULL NULL NULL 10 val_10 NULL NULL NULL NULL 11 val_11 NULL NULL NULL NULL 12 val_12 @@ -241,6 +247,8 @@ Map Reduce Alias -> Map Operator Tree: src2 + TableScan + alias: src2 Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(15)) @@ -264,6 +272,8 @@ expr: value type: string src3 + TableScan + alias: src3 Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(20)) @@ -287,6 +297,8 @@ expr: value type: string src1 + TableScan + alias: src1 Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(10)) @@ -344,7 +356,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/930461365/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1169417951/10002 Reduce Output Operator key expressions: expr: _col0 @@ -391,7 +403,7 @@ query: SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10 AND src2.key < 15) RIGHT OUTER JOIN src src3 ON (src1.key = src3.key AND src3.key < 20) SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/89670769/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1104890514/10000 NULL NULL NULL NULL 10 val_10 NULL NULL NULL NULL 11 val_11 NULL NULL NULL NULL 12 val_12 Index: ql/src/test/results/clientpositive/input25.q.out =================================================================== --- ql/src/test/results/clientpositive/input25.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input25.q.out (working copy) @@ -23,6 +23,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:subq-subquery1:x + TableScan + alias: x Filter Operator predicate: expr: (d = '2009-01-01') @@ -64,7 +66,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1648055539/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/197206999/10002 Union Select Operator expressions: @@ -81,7 +83,7 @@ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1648055539/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/197206999/10003 Union Select Operator expressions: @@ -103,6 +105,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:subq-subquery2:x + TableScan + alias: x Filter Operator predicate: expr: (d = '2009-02-02') @@ -153,5 +157,5 @@ ) subq Input: default/tst/d=2009-01-01 Input: default/tst/d=2009-02-02 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/297483633/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1412410730/10000 query: drop table tst Index: ql/src/test/results/clientpositive/groupby3_map.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby3_map.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby3_map.q.out (working copy) @@ -23,6 +23,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: value @@ -160,6 +162,6 @@ Output: default/dest1 
query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/emil/hive1/hive1/build/ql/tmp/1984222937/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1726617358/10000 130091.0 260.182 256.10355987055016 98.0 0.0 142.9268095075238 143.06995106518906 20428.072876 20469.01089779559 query: DROP TABLE dest1 Index: ql/src/test/results/clientpositive/sample7.q.out =================================================================== --- ql/src/test/results/clientpositive/sample7.q.out (revision 801363) +++ ql/src/test/results/clientpositive/sample7.q.out (working copy) @@ -17,15 +17,20 @@ Map Reduce Alias -> Map Operator Tree: s + TableScan + alias: s Filter Operator + isSamplingPred: false predicate: expr: ((((hash(key) & 2147483647) % 4) = 0) and (key > 100)) type: boolean Filter Operator + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 4) = 0) type: boolean Filter Operator + isSamplingPred: false predicate: expr: (key > 100) type: boolean @@ -39,7 +44,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1625728578/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1942666361/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -53,14 +58,14 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -76,7 +81,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket @@ -86,11 +91,11 @@ Move Operator files: hdfs directory: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1625728578/10002 - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1016746713/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1942666361/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/555567283/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1625728578/10002 
+ file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1942666361/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -104,9 +109,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1625728578/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1942666361/10002 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1625728578/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1942666361/10002 Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -121,7 +126,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Reduce Operator Tree: @@ -129,7 +134,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1016746713/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/555567283/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -142,7 +147,7 @@ bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 @@ -151,7 +156,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1016746713/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/555567283/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -165,10 +170,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1016746713/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/555567283/10001 query: INSERT OVERWRITE TABLE dest1 SELECT s.* @@ -178,7 +183,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1683032375/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1488614236/10000 468 val_469 272 val_273 448 val_449 Index: 
ql/src/test/results/clientpositive/join28.q.out =================================================================== --- ql/src/test/results/clientpositive/join28.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join28.q.out (working copy) @@ -21,6 +21,8 @@ Map Reduce Alias -> Map Operator Tree: subq:y + TableScan + alias: y Common Join Operator condition map: Inner Join 0 to 1 @@ -46,6 +48,8 @@ limit: -1 Alias -> Map Local Operator Tree: subq:x + TableScan + alias: x Common Join Operator condition map: Inner Join 0 to 1 @@ -67,7 +71,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1376088811/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/868000036/10002 Select Operator expressions: expr: _col0 @@ -119,6 +123,8 @@ limit: -1 Alias -> Map Local Operator Tree: z + TableScan + alias: z Filter Operator predicate: expr: ((ds = '2008-04-08') and (UDFToDouble(hr) = UDFToDouble(11))) @@ -171,10 +177,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/826602054/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/207829460/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1376088811/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/868000036/10003 Reduce Output Operator sort order: Map-reduce partition columns: @@ -207,6 +213,7 @@ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 + query: INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(z) */ subq.key1, z.value FROM @@ -219,7 +226,7 @@ Output: default/dest_j1 query: select * from dest_j1 x order by x.key Input: default/dest_j1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/2099739143/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/380670394/10000 128 val_128 128 val_128 128 val_128 Index: ql/src/test/results/clientpositive/type_cast_1.q.out =================================================================== --- ql/src/test/results/clientpositive/type_cast_1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/type_cast_1.q.out (working copy) @@ -12,6 +12,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: (if(false, 1, UDFToShort(2)) + 3) @@ -32,5 +34,5 @@ query: SELECT IF(false, 1, cast(2 as smallint)) + 3 FROM src LIMIT 1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1822579210/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2072313548/10000 5 Index: ql/src/test/results/clientpositive/ppd_udf_case.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_udf_case.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_udf_case.q.out (working copy) @@ -23,6 +23,8 @@ Map Reduce Alias -> Map Operator Tree: b + TableScan + alias: b Filter Operator predicate: expr: (ds = '2008-04-08') @@ -46,6 +48,8 @@ expr: hr type: string a + TableScan + alias: a Filter Operator predicate: expr: ((ds = '2008-04-08') and CASE (key) WHEN ('27') THEN (true) WHEN ('38') THEN (false) ELSE (null) END) @@ -109,7 +113,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - 
file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/600222764/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/331138227/10002 Reduce Output Operator key expressions: expr: _col0 @@ -174,7 +178,7 @@ ORDER BY a.key, a.value, a.ds, a.hr, b.key, b.value, b.ds, b.hr Input: default/srcpart/ds=2008-04-08/hr=11 Input: default/srcpart/ds=2008-04-08/hr=12 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/178170122/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1976930760/10000 27 val_27 2008-04-08 11 27 val_27 2008-04-08 11 27 val_27 2008-04-08 11 27 val_27 2008-04-08 12 27 val_27 2008-04-08 12 27 val_27 2008-04-08 11 Index: ql/src/test/results/clientpositive/transform_ppr1.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr1.q.out (revision 0) +++ ql/src/test/results/clientpositive/transform_ppr1.q.out (revision 0) @@ -0,0 +1,361 @@ +query: EXPLAIN EXTENDED +FROM ( + FROM srcpart src + SELECT TRANSFORM(src.ds, src.key, src.value) + USING '/bin/cat' AS (ds, tkey, tvalue) + CLUSTER BY tkey +) tmap +SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 AND tmap.ds = '2008-04-08' +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) ds) (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST ds tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (AND (< (. (TOK_TABLE_OR_COL tmap) tkey) 100) (= (. 
(TOK_TABLE_OR_COL tmap) ds) '2008-04-08'))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + tmap:src + TableScan + alias: src + Select Operator + expressions: + expr: ds + type: string + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1, _col2 + Transform Operator + command: /bin/cat + output info: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns ds,tkey,tvalue + serialization.format 9 + Reduce Output Operator + key expressions: + expr: tkey + type: string + sort order: + + Map-reduce partition columns: + expr: tkey + type: string + tag: -1 + value expressions: + expr: ds + type: string + expr: tkey + type: string + expr: tvalue + type: string + Needs Tagging: false + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 + Partition + partition values: + ds 2008-04-09 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl 
struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + Partition + partition values: + ds 2008-04-09 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + Reduce Operator Tree: + Extract + Filter Operator + isSamplingPred: false + predicate: + expr: ((UDFToDouble(_col1) < UDFToDouble(100)) and (_col0 = '2008-04-08')) + type: boolean + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1641597536/10001 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + serialization.format 1 + columns.types string:string + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +query: FROM ( + FROM srcpart src + SELECT TRANSFORM(src.ds, src.key, src.value) + USING '/bin/cat' AS (ds, tkey, tvalue) + CLUSTER BY tkey +) tmap +SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 AND tmap.ds = '2008-04-08' +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/srcpart/ds=2008-04-08/hr=12 +Input: default/srcpart/ds=2008-04-09/hr=11 +Input: default/srcpart/ds=2008-04-09/hr=12 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/388156213/10000 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +10 val_10 +10 val_10 +11 val_11 +11 val_11 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +17 val_17 +17 val_17 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +19 val_19 +19 val_19 +2 val_2 +2 val_2 +20 val_20 +20 val_20 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +27 val_27 +27 val_27 +28 val_28 +28 val_28 +30 val_30 +30 val_30 +33 val_33 +33 val_33 +34 val_34 +34 val_34 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +4 val_4 +4 val_4 +41 val_41 +41 val_41 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +43 val_43 +43 val_43 +44 val_44 +44 val_44 +47 val_47 +47 val_47 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +51 val_51 +51 val_51 +51 val_51 +51 val_51 +53 val_53 +53 val_53 +54 val_54 +54 val_54 
+57 val_57 +57 val_57 +58 val_58 +58 val_58 +58 val_58 +58 val_58 +64 val_64 +64 val_64 +65 val_65 +65 val_65 +66 val_66 +66 val_66 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +69 val_69 +69 val_69 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +72 val_72 +72 val_72 +72 val_72 +72 val_72 +74 val_74 +74 val_74 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +77 val_77 +77 val_77 +78 val_78 +78 val_78 +8 val_8 +8 val_8 +80 val_80 +80 val_80 +82 val_82 +82 val_82 +83 val_83 +83 val_83 +83 val_83 +83 val_83 +84 val_84 +84 val_84 +84 val_84 +84 val_84 +85 val_85 +85 val_85 +86 val_86 +86 val_86 +87 val_87 +87 val_87 +9 val_9 +9 val_9 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +92 val_92 +92 val_92 +95 val_95 +95 val_95 +95 val_95 +95 val_95 +96 val_96 +96 val_96 +97 val_97 +97 val_97 +97 val_97 +97 val_97 +98 val_98 +98 val_98 +98 val_98 +98 val_98 Index: ql/src/test/results/clientpositive/join0.q.out =================================================================== --- ql/src/test/results/clientpositive/join0.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join0.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: src2:src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(10)) @@ -42,6 +44,8 @@ expr: _col1 type: string src1:src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(10)) @@ -94,7 +98,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/63021860/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1266394522/10002 Reduce Output Operator key expressions: expr: _col0 @@ -137,7 +141,7 @@ (SELECT * FROM src WHERE src.key < 10) src2 SORT BY k1, v1, k2, v2 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2035203874/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1838206361/10000 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/join_rc.q.out =================================================================== --- ql/src/test/results/clientpositive/join_rc.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join_rc.q.out (working copy) @@ -23,6 +23,8 @@ Map Reduce Alias -> Map Operator Tree: join_rc2 + TableScan + alias: join_rc2 Reduce Output Operator key expressions: expr: key @@ -36,6 +38,8 @@ expr: value type: string join_rc1 + TableScan + alias: join_rc1 Reduce Output Operator key expressions: expr: key @@ -79,7 +83,7 @@ FROM join_rc1 JOIN join_rc2 ON join_rc1.key = join_rc2.key Input: default/join_rc2 Input: default/join_rc1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1293645641/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/206616711/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/input5.q.out =================================================================== --- ql/src/test/results/clientpositive/input5.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input5.q.out (working copy) @@ -19,6 +19,8 @@ Map Reduce Alias -> Map Operator Tree: tmap:src_thrift + TableScan + alias: src_thrift Select Operator expressions: expr: lint @@ -85,7 +87,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: 
file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/915690046/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/120418612/10000 [0,0,0] [{"myint":0,"mystring":"0","underscore_int":0}] [1,2,3] [{"myint":1,"mystring":"1","underscore_int":1}] [2,4,6] [{"myint":4,"mystring":"8","underscore_int":2}] Index: ql/src/test/results/clientpositive/regexp_extract.q.out =================================================================== --- ql/src/test/results/clientpositive/regexp_extract.q.out (revision 801363) +++ ql/src/test/results/clientpositive/regexp_extract.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: tmap:src + TableScan + alias: src Select Operator expressions: expr: key @@ -54,9 +56,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Path -> Partition: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -71,12 +73,13 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src Reduce Operator Tree: Extract Filter Operator + isSamplingPred: false predicate: expr: (UDFToDouble(_col0) < UDFToDouble(100)) type: boolean @@ -90,7 +93,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/692408203/10001 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/522106028/10001 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -103,7 +106,6 @@ Fetch Operator limit: -1 - query: FROM ( FROM src SELECT TRANSFORM(src.key, src.value, 1+2, 3+4) @@ -112,7 +114,7 @@ ) tmap SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)',1) WHERE tmap.key < 100 Input: default/src -Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/136730416/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1714962148/10000 0 0 3 0 0 3 0 0 3 @@ -217,6 +219,8 @@ Map Reduce Alias -> Map Operator Tree: tmap:src + TableScan + alias: src Select Operator expressions: expr: key @@ -253,9 +257,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Path -> Partition: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -270,12 +274,13 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat 
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src Reduce Operator Tree: Extract Filter Operator + isSamplingPred: false predicate: expr: (UDFToDouble(_col0) < UDFToDouble(100)) type: boolean @@ -289,7 +294,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/1786530675/10001 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1763826161/10001 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -302,7 +307,6 @@ Fetch Operator limit: -1 - query: FROM ( FROM src SELECT TRANSFORM(src.key, src.value, 1+2, 3+4) @@ -311,7 +315,7 @@ ) tmap SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)') WHERE tmap.key < 100 Input: default/src -Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/1351305244/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1100247728/10000 0 0 3 0 0 3 0 0 3 Index: ql/src/test/results/clientpositive/input_testsequencefile.q.out =================================================================== --- ql/src/test/results/clientpositive/input_testsequencefile.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input_testsequencefile.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -44,10 +46,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1551879808/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/66296780/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/333825755/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/696386493/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -87,7 +89,7 @@ Output: default/dest4_sequencefile query: SELECT dest4_sequencefile.* FROM dest4_sequencefile Input: default/dest4_sequencefile -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1330571265/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1740018571/10000 238 val_238 86 val_86 311 val_311 Index: ql/src/test/results/clientpositive/join8.q.out =================================================================== --- ql/src/test/results/clientpositive/join8.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join8.q.out (working copy) @@ -25,6 +25,8 @@ Map Reduce Alias -> Map Operator Tree: c:a:src1 + TableScan + alias: src1 Filter Operator predicate: expr: ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20))) @@ -55,6 +57,8 @@ expr: _col1 type: string c:b:src2 + TableScan + alias: src2 Filter Operator predicate: expr: ((UDFToDouble(key) > UDFToDouble(15)) and (UDFToDouble(key) < UDFToDouble(25))) @@ -166,7 +170,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/2108029015/10000 +Output: 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/214964657/10000 11 val_11 NULL NULL 12 val_12 NULL NULL 12 val_12 NULL NULL Index: ql/src/test/results/clientpositive/udf_case_column_pruning.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_case_column_pruning.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_case_column_pruning.q.out (working copy) @@ -20,6 +20,8 @@ Map Reduce Alias -> Map Operator Tree: b + TableScan + alias: b Reduce Output Operator key expressions: expr: key @@ -30,6 +32,8 @@ type: string tag: 1 a + TableScan + alias: a Reduce Output Operator key expressions: expr: key @@ -65,7 +69,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/413650018/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1921754702/10002 Reduce Output Operator key expressions: expr: _col0 @@ -99,7 +103,7 @@ ON a.key = b.key ORDER BY key LIMIT 10 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1762314926/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/8014055/10000 5 5 5 Index: ql/src/test/results/clientpositive/rand_partitionpruner1.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (working copy) @@ -12,7 +12,10 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator + isSamplingPred: false predicate: expr: (rand(UDFToLong(1)) < 0.1) type: boolean @@ -26,7 +29,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/1509492939/10001 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/972674441/10001 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -36,9 +39,9 @@ columns.types string:string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Path -> Partition: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -53,7 +56,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src @@ -64,7 +67,7 @@ query: select * from src where rand(1) < 0.1 Input: default/src -Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/1647273851/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1925118619/10000 409 val_409 429 val_429 209 val_209 Index: ql/src/test/results/clientpositive/union13.q.out 
=================================================================== --- ql/src/test/results/clientpositive/union13.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union13.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unionsrc-subquery1:s1 + TableScan + alias: s1 Select Operator expressions: expr: key @@ -37,6 +39,8 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat null-subquery2:unionsrc-subquery2:s2 + TableScan + alias: s2 Select Operator expressions: expr: key @@ -67,7 +71,7 @@ query: select unionsrc.key, unionsrc.value FROM (select s1.key as key, s1.value as value from src s1 UNION ALL select s2.key as key, s2.value as value from src s2) unionsrc Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/362764706/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/49034053/10000 238 val_238 238 val_238 86 val_86 Index: ql/src/test/results/clientpositive/groupby1_map_skew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby1_map_skew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby1_map_skew.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -60,7 +62,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/288360441/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1847926486/10002 Reduce Output Operator key expressions: expr: _col0 @@ -121,7 +123,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1461563425/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1826125083/10000 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/input11.q.out =================================================================== --- ql/src/test/results/clientpositive/input11.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input11.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(100)) @@ -52,10 +54,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/947366585/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1030796464/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/232837920/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1121239779/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -95,7 +97,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/68605859/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2019052885/10000 86 val_86 27 val_27 98 val_98 Index: ql/src/test/results/clientpositive/noalias_subq1.q.out =================================================================== --- ql/src/test/results/clientpositive/noalias_subq1.q.out (revision 801363) +++ 
ql/src/test/results/clientpositive/noalias_subq1.q.out (working copy) @@ -12,6 +12,8 @@ Map Reduce Alias -> Map Operator Tree: x:src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(100)) @@ -46,7 +48,7 @@ query: SELECT c1 FROM (select value as c1, key as c2 from src) x where c2 < 100 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1592516174/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1984163495/10000 val_86 val_27 val_98 Index: ql/src/test/results/clientpositive/udf3.q.out =================================================================== --- ql/src/test/results/clientpositive/udf3.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf3.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator Group By Operator aggregations: @@ -87,5 +89,5 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/324597347/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2020825677/10000 0 NULL NULL NULL NULL Index: ql/src/test/results/clientpositive/groupby6_noskew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby6_noskew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby6_noskew.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: value @@ -66,7 +68,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/537807797/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1334215067/10000 0 1 2 Index: ql/src/test/results/clientpositive/input_testxpath.q.out =================================================================== --- ql/src/test/results/clientpositive/input_testxpath.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input_testxpath.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src_thrift + TableScan + alias: src_thrift Select Operator expressions: expr: lint[1] @@ -39,10 +41,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/893288498/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/794454681/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/647174747/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/492458082/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -84,7 +86,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/761702774/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/264689843/10000 0 0 NULL 2 1 NULL 4 8 value_2 Index: ql/src/test/results/clientpositive/join14.q.out =================================================================== --- ql/src/test/results/clientpositive/join14.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join14.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: srcpart + TableScan + 
alias: srcpart Filter Operator predicate: expr: (ds = '2008-04-08') @@ -35,6 +37,8 @@ expr: value type: string src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) > UDFToDouble(100)) @@ -105,7 +109,7 @@ Output: default/dest1 query: select dest1.* from dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/217012065/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1973958264/10000 103 val_103 103 val_103 103 val_103 Index: ql/src/test/results/clientpositive/ppd_gby2.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_gby2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_gby2.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: src1:src + TableScan + alias: src Filter Operator predicate: expr: ((value > 'val_10') and (value > 'val_200')) @@ -97,7 +99,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1032558559/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1187596438/10002 Reduce Output Operator key expressions: expr: _col0 @@ -144,7 +146,7 @@ WHERE src1.c1 > 'val_200' AND (src1.c2 > 30 OR src1.c1 < 'val_400') GROUP BY src1.c2 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/502393484/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/825792285/10000 val_4 1 val_399 2 val_396 3 Index: ql/src/test/results/clientpositive/groupby3.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby3.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby3.q.out (working copy) @@ -24,6 +24,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: value @@ -62,7 +64,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/emil/hive1/hive1/build/ql/tmp/1684499238/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/313900141/10002 Reduce Output Operator sort order: tag: -1 @@ -176,6 +178,6 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/emil/hive1/hive1/build/ql/tmp/1807842455/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1347361975/10000 130091.0 260.182 256.10355987055016 98.0 0.0 142.92680950752379 143.06995106518903 20428.072875999995 20469.010897795586 query: DROP TABLE dest1 Index: ql/src/test/results/clientpositive/subq.q.out =================================================================== --- ql/src/test/results/clientpositive/subq.q.out (revision 801363) +++ ql/src/test/results/clientpositive/subq.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: unioninput:src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(100)) @@ -51,10 +53,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/177397376/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1653419490/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/1486120786/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212460499/10001 Reduce 
Output Operator sort order: Map-reduce partition columns: Index: ql/src/test/results/clientpositive/input_part6.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part6.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input_part6.q.out (working copy) @@ -12,6 +12,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator predicate: expr: (UDFToDouble(ds) = UDFToDouble(((2008 - 4) - 8))) @@ -49,4 +51,4 @@ Input: default/srcpart/ds=2008-04-08/hr=12 Input: default/srcpart/ds=2008-04-09/hr=11 Input: default/srcpart/ds=2008-04-09/hr=12 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/447936279/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/758866598/10000 Index: ql/src/test/results/clientpositive/union6.q.out =================================================================== --- ql/src/test/results/clientpositive/union6.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union6.q.out (working copy) @@ -25,6 +25,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unionsrc-subquery1:s1 + TableScan + alias: s1 Select Operator Group By Operator aggregations: @@ -60,7 +62,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1186530/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1191770303/10002 Union Select Operator expressions: @@ -77,7 +79,7 @@ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: tmptable - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1186530/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1191770303/10004 Union Select Operator expressions: @@ -101,10 +103,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/764399533/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1310004866/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1186530/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1191770303/10003 Reduce Output Operator sort order: Map-reduce partition columns: @@ -141,6 +143,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery2:unionsrc-subquery2:s2 + TableScan + alias: s2 Select Operator expressions: expr: key @@ -165,7 +169,7 @@ Output: default/tmptable query: select * from tmptable x sort by x.key, x.value Input: default/tmptable -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/730383736/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/551301266/10000 Index: ql/src/test/results/clientpositive/udf_repeat.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_repeat.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_repeat.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: repeat('Facebook', 3) @@ -47,5 +49,5 @@ repeat("asdf", -1) FROM src LIMIT 1 Input: default/src -Output: file:/data/users/njain/hive_commit1/hive_commit1/.ptest_1/build/ql/tmp/1216905646/10000 +Output: 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1342490934/10000 FacebookFacebookFacebook Index: ql/src/test/results/clientpositive/mapreduce1.q.out =================================================================== --- ql/src/test/results/clientpositive/mapreduce1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/mapreduce1.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -99,7 +101,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1423458413/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1222794059/10000 0 0 0 val_0 0 0 0 val_0 0 0 0 val_0 Index: ql/src/test/results/clientpositive/input20.q.out =================================================================== --- ql/src/test/results/clientpositive/input20.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input20.q.out (working copy) @@ -23,6 +23,8 @@ Map Reduce Alias -> Map Operator Tree: tmap:src + TableScan + alias: src Select Operator expressions: expr: key @@ -107,7 +109,7 @@ Output: default/dest1 query: SELECT * FROM dest1 SORT BY key, value Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/895109430/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/660141896/10000 1 105_105 1 10_10 1 111_111 Index: ql/src/test/results/clientpositive/input14_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input14_limit.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input14_limit.q.out (working copy) @@ -20,6 +20,8 @@ Map Reduce Alias -> Map Operator Tree: tmap:src + TableScan + alias: src Select Operator expressions: expr: key @@ -59,7 +61,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1622333901/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/34766397/10002 Reduce Output Operator key expressions: expr: _col0 @@ -126,7 +128,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/646323888/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/452755876/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/sample2.q.out =================================================================== --- ql/src/test/results/clientpositive/sample2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/sample2.q.out (working copy) @@ -17,11 +17,15 @@ Map Reduce Alias -> Map Operator Tree: s + TableScan + alias: s Filter Operator + isSamplingPred: false predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean Filter Operator + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean @@ -35,7 +39,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1445240847/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1185659559/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -49,14 +53,14 @@ serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -72,7 +76,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket @@ -82,11 +86,11 @@ Move Operator files: hdfs directory: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1445240847/10002 - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1212311667/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1185659559/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1588988101/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1445240847/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1185659559/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -100,9 +104,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1445240847/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1185659559/10002 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1445240847/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1185659559/10002 Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -117,7 +121,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Reduce Operator Tree: @@ -125,7 +129,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1212311667/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1588988101/10000 table: input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -138,7 +142,7 @@ bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 @@ -147,7 +151,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1212311667/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1588988101/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -161,10 +165,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1212311667/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1588988101/10001 query: INSERT OVERWRITE TABLE dest1 SELECT s.* @@ -173,7 +177,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1461875588/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/791982491/10000 474 val_475 62 val_63 468 val_469 Index: ql/src/test/results/clientpositive/join23.q.out =================================================================== --- ql/src/test/results/clientpositive/join23.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join23.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src2 + TableScan + alias: src2 Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(10)) @@ -26,6 +28,8 @@ expr: value type: string src1 + TableScan + alias: src1 Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(10)) @@ -71,7 +75,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1132349253/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1309960424/10002 Reduce Output Operator key expressions: expr: _col0 @@ -109,7 +113,7 @@ query: SELECT * FROM src src1 JOIN src src2 WHERE src1.key < 10 and src2.key < 10 SORT BY src1.key, src1.value, src2.key, src2.value Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1211655384/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2006836626/10000 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/ppd_outer_join4.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_outer_join4.q.out (revision 801363) +++ 
ql/src/test/results/clientpositive/ppd_outer_join4.q.out (working copy) @@ -21,6 +21,8 @@ Map Reduce Alias -> Map Operator Tree: b + TableScan + alias: b Reduce Output Operator key expressions: expr: key @@ -36,6 +38,8 @@ expr: value type: string c + TableScan + alias: c Reduce Output Operator key expressions: expr: key @@ -49,6 +53,8 @@ expr: key type: string a + TableScan + alias: a Reduce Output Operator key expressions: expr: key @@ -113,7 +119,7 @@ SELECT a.key, a.value, b.key, b.value, c.key WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/298031004/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1848803718/10000 150 val_150 150 val_150 150 152 val_152 152 val_152 152 152 val_152 152 val_152 152 Index: ql/src/test/results/clientpositive/regex_col.q.out =================================================================== --- ql/src/test/results/clientpositive/regex_col.q.out (revision 801363) +++ ql/src/test/results/clientpositive/regex_col.q.out (working copy) @@ -26,6 +26,8 @@ Map Reduce Alias -> Map Operator Tree: srcpart + TableScan + alias: srcpart Select Operator expressions: expr: ds @@ -59,6 +61,8 @@ Map Reduce Alias -> Map Operator Tree: srcpart + TableScan + alias: srcpart Select Operator expressions: expr: ds @@ -93,6 +97,8 @@ Map Reduce Alias -> Map Operator Tree: b + TableScan + alias: b Reduce Output Operator key expressions: expr: key @@ -112,6 +118,8 @@ expr: hr type: string a + TableScan + alias: a Reduce Output Operator key expressions: expr: key @@ -178,6 +186,8 @@ Map Reduce Alias -> Map Operator Tree: b + TableScan + alias: b Reduce Output Operator key expressions: expr: key @@ -201,6 +211,8 @@ expr: hr type: string a + TableScan + alias: a Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(103)) @@ -251,7 +263,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1981087431/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2084085197/10002 Reduce Output Operator key expressions: expr: _col0 @@ -286,7 +298,7 @@ Input: default/srcpart/ds=2008-04-08/hr=12 Input: default/srcpart/ds=2008-04-09/hr=11 Input: default/srcpart/ds=2008-04-09/hr=12 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/142920230/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/701163717/10000 2008-04-08 11 2008-04-08 11 2008-04-08 11 @@ -317,6 +329,8 @@ Map Reduce Alias -> Map Operator Tree: srcpart + TableScan + alias: srcpart Select Operator expressions: expr: key @@ -348,6 +362,8 @@ Map Reduce Alias -> Map Operator Tree: srcpart + TableScan + alias: srcpart Select Operator expressions: expr: ds @@ -379,6 +395,8 @@ Map Reduce Alias -> Map Operator Tree: srcpart + TableScan + alias: srcpart Select Operator expressions: expr: key @@ -414,6 +432,8 @@ Map Reduce Alias -> Map Operator Tree: srcpart + TableScan + alias: srcpart Select Operator expressions: expr: key @@ -454,7 +474,7 @@ Input: default/srcpart/ds=2008-04-08/hr=12 Input: default/srcpart/ds=2008-04-09/hr=11 Input: default/srcpart/ds=2008-04-09/hr=12 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/116029619/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/27970835/10000 0 val_0 0 val_0 0 val_0 Index: 
ql/src/test/results/clientpositive/louter_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/louter_join_ppr.q.out (revision 0) +++ ql/src/test/results/clientpositive/louter_join_ppr.q.out (revision 0) @@ -0,0 +1,827 @@ +query: EXPLAIN EXTENDED + FROM + src a + LEFT OUTER JOIN + srcpart b + ON (a.key = b.key AND b.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF src a) (TOK_TABREF srcpart b) (AND (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (= (. (TOK_TABLE_OR_COL b) ds) '2008-04-08')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. (TOK_TABLE_OR_COL b) key) 25))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + b + TableScan + alias: b + Filter Operator + isSamplingPred: false + predicate: + expr: (ds = '2008-04-08') + type: boolean + Filter Operator + isSamplingPred: false + predicate: + expr: (ds = '2008-04-08') + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + a + TableScan + alias: a + Filter Operator + isSamplingPred: false + predicate: + expr: ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20))) + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + Needs Tagging: true + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition 
values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name src + columns.types string:string + serialization.ddl struct src { string key, string value} + serialization.format 1 + columns key,value + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: src + Reduce Operator Tree: + Join Operator + condition map: + Left Outer Join0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} + outputColumnNames: _col0, _col1, _col2, _col3 + Filter Operator + isSamplingPred: false + predicate: + expr: ((((UDFToDouble(_col0) > UDFToDouble(10)) and (UDFToDouble(_col0) < UDFToDouble(20))) and (UDFToDouble(_col2) > UDFToDouble(15))) and (UDFToDouble(_col2) < UDFToDouble(25))) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/374479646/10001 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + serialization.format 1 + columns.types string:string:string:string + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +query: FROM + src a + LEFT OUTER JOIN + srcpart b + ON (a.key = b.key AND b.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/srcpart/ds=2008-04-08/hr=12 +Input: default/src +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/216486087/10000 +17 val_17 17 val_17 +17 val_17 17 val_17 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +19 val_19 19 val_19 +19 val_19 19 val_19 +query: EXPLAIN EXTENDED + FROM + srcpart a + LEFT OUTER JOIN + src b + ON (a.key = b.key AND a.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 +ABSTRACT SYNTAX 
TREE: + (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF srcpart a) (TOK_TABREF src b) (AND (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (= (. (TOK_TABLE_OR_COL a) ds) '2008-04-08')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. (TOK_TABLE_OR_COL b) key) 25))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + b + TableScan + alias: b + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + a + TableScan + alias: a + Filter Operator + isSamplingPred: false + predicate: + expr: ((ds = '2008-04-08') and ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20)))) + type: boolean + Filter Operator + isSamplingPred: false + predicate: + expr: (ds = '2008-04-08') + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + Needs Tagging: true + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name src + columns.types string:string + serialization.ddl struct src { string key, string value} + serialization.format 1 + columns key,value + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + Reduce Operator Tree: + Join Operator + condition map: + Left Outer Join0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} + outputColumnNames: _col0, _col1, _col4, _col5 + Filter Operator + isSamplingPred: false + predicate: + expr: ((((UDFToDouble(_col0) > UDFToDouble(10)) and (UDFToDouble(_col0) < UDFToDouble(20))) and (UDFToDouble(_col4) > UDFToDouble(15))) and (UDFToDouble(_col4) < UDFToDouble(25))) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1382329177/10001 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + serialization.format 1 + columns.types string:string:string:string + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +query: FROM + srcpart a + LEFT OUTER JOIN + src b + ON (a.key = b.key AND a.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 +Input: default/src +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/srcpart/ds=2008-04-08/hr=12 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1521057709/10000 +17 val_17 17 val_17 +17 val_17 17 val_17 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +19 val_19 19 val_19 +19 val_19 19 val_19 +query: EXPLAIN EXTENDED + FROM + src a + LEFT OUTER JOIN + srcpart b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08' +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF src a) (TOK_TABREF srcpart b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. (TOK_TABLE_OR_COL b) key) 25)) (= (. 
(TOK_TABLE_OR_COL b) ds) '2008-04-08'))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + b + TableScan + alias: b + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + a + TableScan + alias: a + Filter Operator + isSamplingPred: false + predicate: + expr: ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20))) + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + Needs Tagging: true + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 + Partition + partition values: + ds 2008-04-09 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + 
properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + Partition + partition values: + ds 2008-04-09 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name src + columns.types string:string + serialization.ddl struct src { string key, string value} + serialization.format 1 + columns key,value + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: src + Reduce Operator Tree: + Join Operator + condition map: + Left Outer Join0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Filter Operator + isSamplingPred: false + predicate: + expr: (((((UDFToDouble(_col0) > UDFToDouble(10)) and (UDFToDouble(_col0) < UDFToDouble(20))) and (UDFToDouble(_col2) > UDFToDouble(15))) and (UDFToDouble(_col2) < UDFToDouble(25))) and (_col4 = '2008-04-08')) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2039930137/10001 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + serialization.format 1 + columns.types string:string:string:string + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +query: FROM + src a + LEFT OUTER JOIN + srcpart b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, 
b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08' +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/srcpart/ds=2008-04-08/hr=12 +Input: default/srcpart/ds=2008-04-09/hr=11 +Input: default/srcpart/ds=2008-04-09/hr=12 +Input: default/src +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/617611715/10000 +17 val_17 17 val_17 +17 val_17 17 val_17 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +19 val_19 19 val_19 +19 val_19 19 val_19 +query: EXPLAIN EXTENDED + FROM + srcpart a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08' +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF srcpart a) (TOK_TABREF src b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. (TOK_TABLE_OR_COL b) key) 25)) (= (. (TOK_TABLE_OR_COL a) ds) '2008-04-08'))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + b + TableScan + alias: b + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + a + TableScan + alias: a + Filter Operator + isSamplingPred: false + predicate: + expr: (((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20))) and (ds = '2008-04-08')) + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + Needs Tagging: true + Path -> Alias: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Path -> Partition: + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name src + columns.types string:string + serialization.ddl struct src { string key, string value} + serialization.format 1 + columns key,value + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: src + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + name srcpart + columns.types string:string + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: srcpart + Reduce Operator Tree: + Join Operator + condition map: + Left Outer Join0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} + 1 {VALUE._col0} {VALUE._col1} + outputColumnNames: _col0, _col1, _col2, _col4, _col5 + Filter Operator + isSamplingPred: false + predicate: + expr: (((((UDFToDouble(_col0) > UDFToDouble(10)) and (UDFToDouble(_col0) < UDFToDouble(20))) and (UDFToDouble(_col4) > UDFToDouble(15))) and (UDFToDouble(_col4) < UDFToDouble(25))) and (_col2 = '2008-04-08')) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/116279383/10001 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + serialization.format 1 + columns.types string:string:string:string + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +query: FROM + srcpart a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08' +Input: default/src +Input: default/srcpart/ds=2008-04-08/hr=11 +Input: default/srcpart/ds=2008-04-08/hr=12 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/309612837/10000 +17 val_17 17 val_17 +17 val_17 17 val_17 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 
18 val_18 +19 val_19 19 val_19 +19 val_19 19 val_19 Index: ql/src/test/results/clientpositive/groupby8_map.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby8_map.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby8_map.q.out (working copy) @@ -19,6 +19,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Reduce Output Operator key expressions: expr: substr(value, 5) @@ -65,7 +67,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1155746745/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/206377783/10004 Reduce Output Operator key expressions: expr: _col0 @@ -123,7 +125,7 @@ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1155746745/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/206377783/10005 Reduce Output Operator key expressions: expr: _col0 @@ -187,7 +189,7 @@ Output: default/dest2 query: SELECT DEST1.* FROM DEST1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1384333034/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/317456248/10000 0 1 10 1 100 1 @@ -499,7 +501,7 @@ 98 1 query: SELECT DEST2.* FROM DEST2 Input: default/dest2 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2072775887/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/982390628/10000 0 1 10 1 100 1 Index: ql/src/test/results/clientpositive/join_thrift.q.out =================================================================== --- ql/src/test/results/clientpositive/join_thrift.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join_thrift.q.out (working copy) @@ -22,6 +22,8 @@ Map Reduce Alias -> Map Operator Tree: s2 + TableScan + alias: s2 Reduce Output Operator key expressions: expr: aint @@ -35,6 +37,8 @@ expr: lintstring type: array> s1 + TableScan + alias: s1 Reduce Output Operator key expressions: expr: aint @@ -79,7 +83,7 @@ JOIN src_thrift s2 ON s1.aint = s2.aint Input: default/src_thrift -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2021763754/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/589957555/10000 -1952710710 [{"myint":25,"mystring":"125","underscore_int":5}] -1461153973 [{"myint":49,"mystring":"343","underscore_int":7}] -751827638 [{"myint":4,"mystring":"8","underscore_int":2}] Index: ql/src/test/results/clientpositive/quote2.q.out =================================================================== --- ql/src/test/results/clientpositive/quote2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/quote2.q.out (working copy) @@ -24,6 +24,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: 'abc' @@ -92,5 +94,5 @@ FROM src LIMIT 1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1644354823/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1457515970/10000 abc abc abc' abc" abc\ abc\ abc\' abc\" abc\\ abc\\ abc\\' abc\\" abc\\\ abc\\\ abc""""\ abc''''\ awk '{print NR"\t"$0}' tab tab tab tab Index: ql/src/test/results/clientpositive/merge1.q.out =================================================================== --- 
ql/src/test/results/clientpositive/merge1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/merge1.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -79,10 +81,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1006862555/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1134406044/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1670149848/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2072613560/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -122,7 +124,7 @@ Output: default/dest1 query: select * from dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1463256320/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1979784812/10000 0 3 10 1 100 2 Index: ql/src/test/results/clientpositive/join3.q.out =================================================================== --- ql/src/test/results/clientpositive/join3.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join3.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src2 + TableScan + alias: src2 Reduce Output Operator key expressions: expr: key @@ -24,6 +26,8 @@ type: string tag: 1 src3 + TableScan + alias: src3 Reduce Output Operator key expressions: expr: key @@ -37,6 +41,8 @@ expr: value type: string src1 + TableScan + alias: src1 Reduce Output Operator key expressions: expr: key @@ -99,7 +105,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/1857539841/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1375529446/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/udf_elt.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_elt.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_elt.q.out (working copy) @@ -23,6 +23,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: elt(2, 'abc', 'defg') @@ -74,5 +76,5 @@ elt(3, 'abc', 'defg') FROM src LIMIT 1 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1869995338/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/298713096/10000 defg cc abc 2 12345 123456789012 1.25 16.0 NULL NULL NULL Index: ql/src/test/results/clientpositive/input_testxpath2.q.out =================================================================== --- ql/src/test/results/clientpositive/input_testxpath2.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input_testxpath2.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src_thrift + TableScan + alias: src_thrift Filter Operator predicate: expr: (lint is not null and not mstringstring is null) @@ -47,10 +49,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1379940084/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/630839318/10000 Map Reduce Alias -> Map Operator Tree: - 
file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/476600317/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/284782500/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -92,7 +94,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1955798918/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/413755901/10000 3 1 1 3 1 1 3 1 1 Index: ql/src/test/results/clientpositive/udf_ascii.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_ascii.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_ascii.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: ascii('Facebook') @@ -43,5 +45,5 @@ ascii('!') FROM src LIMIT 1 Input: default/src -Output: file:/data/users/njain/hive_commit1/hive_commit1/.ptest_0/build/ql/tmp/1477274804/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/317294381/10000 70 0 33 Index: ql/src/test/results/clientpositive/input8.q.out =================================================================== --- ql/src/test/results/clientpositive/input8.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input8.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src1 + TableScan + alias: src1 Select Operator expressions: expr: (UDFToDouble(4) + null) @@ -48,10 +50,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2001020819/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1626829975/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/546572245/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2078440366/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -93,7 +95,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/290179113/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/60632490/10000 NULL NULL NULL NULL NULL NULL NULL NULL NULL Index: ql/src/test/results/clientpositive/union.q.out =================================================================== --- ql/src/test/results/clientpositive/union.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union.q.out (working copy) @@ -20,6 +20,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1:unioninput-subquery1:src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) < UDFToDouble(100)) @@ -50,6 +52,8 @@ input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat null-subquery2:unioninput-subquery2:src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) > UDFToDouble(100)) @@ -86,10 +90,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/516261269/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1350073844/10000 Map Reduce Alias -> Map Operator Tree: - 
file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1480105708/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1473634574/10001 Reduce Output Operator sort order: Map-reduce partition columns: Index: ql/src/test/results/clientpositive/udf_10_trims.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_10_trims.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_10_trims.q.out (working copy) @@ -17,6 +17,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(86)) @@ -45,10 +47,10 @@ Move Operator files: hdfs directory: true - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/2072143204/10000 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1953536526/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1442248983/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1071991378/10002 Reduce Output Operator sort order: Map-reduce partition columns: Index: ql/src/test/results/clientpositive/union16.q.out =================================================================== --- ql/src/test/results/clientpositive/union16.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union16.q.out (working copy) @@ -41,6 +41,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -62,6 +64,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -83,6 +87,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -104,6 +110,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -125,6 +133,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -146,6 +156,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -167,6 +179,8 @@ expr: _col0 type: bigint 
null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -188,6 +202,8 @@ expr: _col0 type: bigint null-subquery2:src-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -209,6 +225,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -230,6 +248,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -251,6 +271,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -272,6 +294,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -293,6 +317,8 @@ expr: _col0 type: bigint null-subquery1-subquery2:src-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -314,6 +340,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -335,6 +363,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -356,6 +386,8 @@ expr: _col0 type: bigint 
null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -377,6 +409,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -398,6 +432,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1:src + TableScan + alias: src Select Operator expressions: expr: key @@ -419,6 +455,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -440,6 +478,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -461,6 +501,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -482,6 +524,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -503,6 +547,8 @@ expr: _col0 type: bigint 
null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -524,6 +570,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -545,6 +593,8 @@ expr: _col0 type: bigint null-subquery1-subquery1-subquery2:src-subquery1-subquery1-subquery2:src + TableScan + alias: src Select Operator expressions: expr: key @@ -619,5 +669,5 @@ SELECT key, value FROM src UNION ALL SELECT key, value FROM src) src Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/957620610/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1669450040/10000 12500 Index: ql/src/test/results/clientpositive/ppd_gby_join.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_gby_join.q.out (revision 801363) +++ ql/src/test/results/clientpositive/ppd_gby_join.q.out (working copy) @@ -20,6 +20,8 @@ Map Reduce Alias -> Map Operator Tree: src2:src + TableScan + alias: src Filter Operator predicate: expr: ((key > '2') and (key <> '4')) @@ -46,6 +48,8 @@ expr: _col0 type: string src1:src + TableScan + alias: src Filter Operator predicate: expr: (((key > '1') and (key < '400')) and ((key > '20') and ((value < 'val_50') or (key > '2')))) @@ -114,7 +118,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/119836077/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1620295142/10002 Reduce Output Operator key expressions: expr: _col0 Index: ql/src/test/results/clientpositive/input14.q.out =================================================================== --- ql/src/test/results/clientpositive/input14.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input14.q.out (working copy) @@ -19,6 +19,8 @@ Map Reduce Alias -> Map Operator Tree: tmap:src + TableScan + alias: src Select Operator expressions: expr: key @@ -96,7 +98,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/254878200/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1767626690/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/join32.q.out =================================================================== --- ql/src/test/results/clientpositive/join32.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join32.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: y + TableScan + alias: y Common Join Operator condition map: Inner Join 0 to 1 @@ -32,7 +34,7 @@ File Output Operator compressed: false 
GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/439867744/10003 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/919843627/10003 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -48,6 +50,8 @@ limit: -1 Alias -> Map Local Operator Tree: x + TableScan + alias: x Common Join Operator condition map: Inner Join 0 to 1 @@ -62,7 +66,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/439867744/10003 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/919843627/10003 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -72,9 +76,9 @@ escape.delim \ Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -89,14 +93,14 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/439867744/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/919843627/10003 Select Operator expressions: expr: _col0 @@ -138,7 +142,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/439867744/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/919843627/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -152,7 +156,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 Local Work: @@ -163,15 +167,20 @@ limit: -1 Alias -> Map Local Operator Tree: z + TableScan + alias: z Filter Operator + isSamplingPred: false predicate: expr: ((ds = '2008-04-08') and (UDFToDouble(hr) = UDFToDouble(11))) type: boolean Filter Operator + isSamplingPred: false predicate: expr: (ds = '2008-04-08') type: boolean Filter Operator + isSamplingPred: false predicate: expr: (UDFToDouble(hr) = UDFToDouble(11)) type: boolean @@ -207,7 +216,7 @@ File Output Operator compressed: 
false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/439867744/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/919843627/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -221,14 +230,14 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/439867744/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/919843627/10003 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/439867744/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/919843627/10003 Partition input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -244,11 +253,11 @@ Move Operator files: hdfs directory: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/439867744/10002 - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/306406773/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/919843627/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/448925002/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/439867744/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/919843627/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -264,9 +273,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/439867744/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/919843627/10002 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/439867744/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/919843627/10002 Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -281,7 +290,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 Reduce Operator Tree: @@ -289,7 +298,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/306406773/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/448925002/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -302,7 +311,7 @@ bucket_count -1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 @@ -311,7 +320,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/306406773/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/448925002/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -325,10 +334,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/306406773/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/448925002/10001 query: INSERT OVERWRITE TABLE dest_j1 @@ -341,7 +350,7 @@ Output: default/dest_j1 query: select * from dest_j1 x order by x.key Input: default/dest_j1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/782356912/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1275139268/10000 146 val_146 val_146 146 val_146 val_146 146 val_146 val_146 Index: ql/src/test/results/clientpositive/udf6.q.out =================================================================== --- ql/src/test/results/clientpositive/udf6.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf6.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: dest1 + TableScan + alias: dest1 Select Operator expressions: expr: if(true, 1, 2) @@ -35,7 +37,7 @@ query: SELECT IF(TRUE, 1, 2) FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1157930235/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/551851303/10000 1 query: EXPLAIN SELECT IF(TRUE, 1, 2), IF(FALSE, 1, 2), IF(NULL, 1, 2), IF(TRUE, "a", "b"), @@ -56,6 +58,8 @@ Map Reduce Alias -> Map Operator Tree: dest1 + TableScan + alias: dest1 Select Operator expressions: expr: if(true, 1, 2) @@ -100,5 +104,5 @@ CAST(128 AS INT), CAST(1.0 AS DOUBLE), CAST('128' AS STRING) FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/496695002/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/112479238/10000 1 2 2 a 0.1 2 126 128 128 1.0 128 Index: ql/src/test/results/clientpositive/input_part1.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part1.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input_part1.q.out (working copy) @@ -15,11 +15,15 @@ Map Reduce Alias -> Map Operator Tree: srcpart + TableScan + alias: srcpart Filter Operator + 
isSamplingPred: false predicate: expr: (((UDFToDouble(key) < UDFToDouble(100)) and (ds = '2008-04-08')) and (hr = '12')) type: boolean Filter Operator + isSamplingPred: false predicate: expr: (((UDFToDouble(key) < UDFToDouble(100)) and (ds = '2008-04-08')) and (hr = '12')) type: boolean @@ -48,7 +52,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/297808158/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/430187082/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -62,14 +66,14 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition partition values: ds 2008-04-08 @@ -88,7 +92,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart @@ -98,11 +102,11 @@ Move Operator files: hdfs directory: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/297808158/10002 - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/880173234/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/430187082/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1548094656/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/297808158/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/430187082/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -120,9 +124,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/297808158/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/430187082/10002 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/297808158/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/430187082/10002 Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -137,7 +141,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat 
org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Reduce Operator Tree: @@ -145,7 +149,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/880173234/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1548094656/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -158,7 +162,7 @@ bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 @@ -167,7 +171,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/880173234/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1548094656/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -181,10 +185,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/880173234/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1548094656/10001 query: FROM srcpart @@ -193,7 +197,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/583441670/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1182712706/10000 86 val_86 12 2008-04-08 27 val_27 12 2008-04-08 98 val_98 12 2008-04-08 Index: ql/src/test/results/clientpositive/join17.q.out =================================================================== --- ql/src/test/results/clientpositive/join17.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join17.q.out (working copy) @@ -14,6 +14,8 @@ Map Reduce Alias -> Map Operator Tree: src2 + TableScan + alias: src2 Reduce Output Operator key expressions: expr: key @@ -29,6 +31,8 @@ expr: value type: string src1 + TableScan + alias: src1 Reduce Output Operator key expressions: expr: key @@ -45,9 +49,9 @@ type: string Needs Tagging: true Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Path -> 
Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -62,7 +66,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src Reduce Operator Tree: @@ -98,7 +102,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/415044925/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1811734881/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -112,7 +116,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 @@ -120,7 +124,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/415044925/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1811734881/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -134,10 +138,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/415044925/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1811734881/10001 query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) @@ -146,7 +150,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/2109540342/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1330602702/10000 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/nullgroup3.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup3.q.out (revision 801363) +++ ql/src/test/results/clientpositive/nullgroup3.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: tstparttbl + TableScan + alias: tstparttbl Select Operator Group By Operator aggregations: @@ -54,7 
+56,7 @@ query: select count(1) from tstparttbl Input: default/tstparttbl/ds=2008-04-08 Input: default/tstparttbl/ds=2008-04-09 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/672327853/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/618377726/10000 500 query: DROP TABLE tstparttbl2 query: CREATE TABLE tstparttbl2(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE @@ -74,6 +76,8 @@ Map Reduce Alias -> Map Operator Tree: tstparttbl2 + TableScan + alias: tstparttbl2 Select Operator Group By Operator aggregations: @@ -112,7 +116,7 @@ query: select count(1) from tstparttbl2 Input: default/tstparttbl2/ds=2008-04-08 Input: default/tstparttbl2/ds=2008-04-09 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1422188471/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/135282100/10000 0 query: DROP TABLE tstparttbl query: CREATE TABLE tstparttbl(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE @@ -132,6 +136,8 @@ Map Reduce Alias -> Map Operator Tree: tstparttbl + TableScan + alias: tstparttbl Select Operator Group By Operator aggregations: @@ -170,7 +176,7 @@ query: select count(1) from tstparttbl Input: default/tstparttbl/ds=2008-04-08 Input: default/tstparttbl/ds=2008-04-09 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/159191500/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1316849116/10000 500 query: DROP TABLE tstparttbl2 query: CREATE TABLE tstparttbl2(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE @@ -190,6 +196,8 @@ Map Reduce Alias -> Map Operator Tree: tstparttbl2 + TableScan + alias: tstparttbl2 Select Operator Group By Operator aggregations: @@ -228,7 +236,7 @@ query: select count(1) from tstparttbl2 Input: default/tstparttbl2/ds=2008-04-08 Input: default/tstparttbl2/ds=2008-04-09 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/409275423/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/420382735/10000 0 query: DROP TABLE tstparttbl query: DROP TABLE tstparttbl2 Index: ql/src/test/results/clientpositive/groupby6.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby6.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby6.q.out (working copy) @@ -15,6 +15,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: value @@ -46,7 +48,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/814045297/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/593503240/10002 Reduce Output Operator key expressions: expr: _col0 @@ -94,7 +96,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/473228216/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/855864385/10000 0 1 2 Index: ql/src/test/results/clientpositive/input_part9.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part9.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input_part9.q.out (working copy) @@ -12,11 +12,15 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Filter Operator 
+ isSamplingPred: false predicate: expr: (key is not null and (ds = '2008-04-08')) type: boolean Filter Operator + isSamplingPred: false predicate: expr: (key is not null and (ds = '2008-04-08')) type: boolean @@ -34,7 +38,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/741456423/10001 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1563301845/10001 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -44,10 +48,10 @@ columns.types string:string:string:string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Path -> Partition: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition partition values: ds 2008-04-08 @@ -66,10 +70,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition partition values: ds 2008-04-08 @@ -88,7 +92,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart @@ -96,11 +100,10 @@ Fetch Operator limit: -1 - query: SELECT x.* FROM SRCPART x WHERE key IS NOT NULL AND ds = '2008-04-08' Input: default/srcpart/ds=2008-04-08/hr=11 Input: default/srcpart/ds=2008-04-08/hr=12 -Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/675875634/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/261570775/10000 238 val_238 2008-04-08 11 86 val_86 2008-04-08 11 311 val_311 2008-04-08 11 Index: ql/src/test/results/clientpositive/input4_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input4_limit.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input4_limit.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan 
+ alias: src Select Operator expressions: expr: key @@ -44,7 +46,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1809793683/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1741200337/10002 Reduce Output Operator key expressions: expr: _col0 @@ -73,7 +75,7 @@ query: select * from src sort by key limit 10 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1717158908/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/385052408/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/union9.q.out =================================================================== --- ql/src/test/results/clientpositive/union9.q.out (revision 801363) +++ ql/src/test/results/clientpositive/union9.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: null-subquery1-subquery1:unionsrc-subquery1-subquery1:s1 + TableScan + alias: s1 Select Operator expressions: expr: key @@ -37,6 +39,8 @@ expr: _col0 type: bigint null-subquery1-subquery2:unionsrc-subquery1-subquery2:s2 + TableScan + alias: s2 Select Operator expressions: expr: key @@ -58,6 +62,8 @@ expr: _col0 type: bigint null-subquery2:unionsrc-subquery2:s3 + TableScan + alias: s3 Select Operator expressions: expr: key @@ -105,5 +111,5 @@ select s2.key as key, s2.value as value from src s2 UNION ALL select s3.key as key, s3.value as value from src s3) unionsrc Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/1822308102/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1539323963/10000 1500 Index: ql/src/test/results/clientpositive/sort.q.out =================================================================== --- ql/src/test/results/clientpositive/sort.q.out (revision 801363) +++ ql/src/test/results/clientpositive/sort.q.out (working copy) @@ -12,6 +12,8 @@ Map Reduce Alias -> Map Operator Tree: x + TableScan + alias: x Select Operator expressions: expr: key @@ -46,7 +48,7 @@ query: SELECT x.* FROM SRC x SORT BY key Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/2055717166/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1302463941/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/mapreduce4.q.out =================================================================== --- ql/src/test/results/clientpositive/mapreduce4.q.out (revision 801363) +++ ql/src/test/results/clientpositive/mapreduce4.q.out (working copy) @@ -18,6 +18,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -99,7 +101,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/812197854/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/354616090/10000 90 9 0 val_90 90 9 0 val_90 90 9 0 val_90 Index: ql/src/test/results/clientpositive/udf_parse_url.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_parse_url.q.out (revision 801363) +++ ql/src/test/results/clientpositive/udf_parse_url.q.out (working copy) @@ -23,6 +23,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Filter Operator predicate: expr: (UDFToDouble(key) = UDFToDouble(86)) 
@@ -81,5 +83,5 @@ parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'AUTHORITY') FROM src WHERE key = 86 Input: default/src -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/976767600/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/919076975/10000 facebook.com /path1/p.php k1=v1&k2=v2 Ref1 v2 v1 NULL /path1/p.php?k1=v1&k2=v2 http NULL facebook.com Index: ql/src/test/results/clientpositive/nullscript.q.out =================================================================== --- ql/src/test/results/clientpositive/nullscript.q.out (revision 801363) +++ ql/src/test/results/clientpositive/nullscript.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: nullscript + TableScan + alias: nullscript Select Operator expressions: expr: key @@ -40,7 +42,7 @@ query: select transform(key) using '/bin/cat' as key1 from nullscript Input: default/nullscript -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/809630857/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/614707845/10000 238 86 311 Index: ql/src/test/results/clientpositive/groupby5_noskew.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby5_noskew.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby5_noskew.q.out (working copy) @@ -16,6 +16,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -86,7 +88,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1980389911/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1708211250/10000 0 0.0 10 10.0 100 200.0 Index: ql/src/test/results/clientpositive/input23.q.out =================================================================== --- ql/src/test/results/clientpositive/input23.q.out (revision 801363) +++ ql/src/test/results/clientpositive/input23.q.out (working copy) @@ -12,7 +12,10 @@ Map Reduce Alias -> Map Operator Tree: b + TableScan + alias: b Filter Operator + isSamplingPred: false predicate: expr: ((ds = '2008-04-08') and (hr = '14')) type: boolean @@ -29,7 +32,10 @@ expr: hr type: string a + TableScan + alias: a Filter Operator + isSamplingPred: false predicate: expr: ((ds = '2008-04-08') and (hr = '11')) type: boolean @@ -47,9 +53,9 @@ type: string Needs Tagging: true Path -> Alias: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Path -> Partition: - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition partition values: ds 2008-04-08 @@ -68,7 +74,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart Reduce Operator Tree: @@ -80,6 +86,7 @@ 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 Filter Operator + isSamplingPred: false predicate: expr: ((((_col2 = '2008-04-08') and (_col3 = '11')) and (_col6 = '2008-04-08')) and (_col7 = '14')) type: boolean @@ -106,7 +113,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1797615685/10001 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1962615553/10001 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -122,4 +129,4 @@ query: select * from srcpart a join srcpart b where a.ds = '2008-04-08' and a.hr = '11' and b.ds = '2008-04-08' and b.hr = '14' limit 5 Input: default/srcpart/ds=2008-04-08/hr=11 -Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1293686104/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/107545677/10000 Index: ql/src/test/results/clientpositive/sample5.q.out =================================================================== --- ql/src/test/results/clientpositive/sample5.q.out (revision 801363) +++ ql/src/test/results/clientpositive/sample5.q.out (working copy) @@ -16,11 +16,15 @@ Map Reduce Alias -> Map Operator Tree: s + TableScan + alias: s Filter Operator + isSamplingPred: false predicate: expr: (((hash(key) & 2147483647) % 5) = 0) type: boolean Filter Operator + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 5) = 0) type: boolean @@ -34,7 +38,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/815713794/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/447406646/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -48,14 +52,14 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -71,7 +75,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcbucket + location 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket @@ -81,11 +85,11 @@ Move Operator files: hdfs directory: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/815713794/10002 - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1602573034/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/447406646/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/34445153/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/815713794/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/447406646/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -99,9 +103,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/815713794/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/447406646/10002 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/815713794/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/447406646/10002 Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -116,7 +120,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 Reduce Operator Tree: @@ -124,7 +128,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1602573034/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/34445153/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -137,7 +141,7 @@ bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 @@ -146,7 +150,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1602573034/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/34445153/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -160,10 +164,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1 + location 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1602573034/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/34445153/10001 query: INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test @@ -172,7 +176,7 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 SORT BY key, value Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/673732668/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/575874993/10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/join26.q.out =================================================================== --- ql/src/test/results/clientpositive/join26.q.out (revision 801363) +++ ql/src/test/results/clientpositive/join26.q.out (working copy) @@ -17,15 +17,20 @@ Map Reduce Alias -> Map Operator Tree: z + TableScan + alias: z Filter Operator + isSamplingPred: false predicate: expr: ((ds = '2008-04-08') and (UDFToDouble(hr) = UDFToDouble(11))) type: boolean Filter Operator + isSamplingPred: false predicate: expr: (ds = '2008-04-08') type: boolean Filter Operator + isSamplingPred: false predicate: expr: (UDFToDouble(hr) = UDFToDouble(11)) type: boolean @@ -64,7 +69,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/140704477/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/179223743/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -78,7 +83,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 Local Work: @@ -92,6 +97,8 @@ limit: -1 Alias -> Map Local Operator Tree: y + TableScan + alias: y Common Join Operator condition map: Inner Join 0 to 1 @@ -127,7 +134,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/140704477/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/179223743/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -141,10 +148,12 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 x + TableScan + alias: x Common Join Operator condition map: Inner Join 0 to 1 @@ -180,7 +189,7 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: 
file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/140704477/10002 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/179223743/10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -194,14 +203,14 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition partition values: ds 2008-04-08 @@ -220,7 +229,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart @@ -230,11 +239,11 @@ Move Operator files: hdfs directory: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/140704477/10002 - destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/757864097/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/179223743/10002 + destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/642786358/10000 Map Reduce Alias -> Map Operator Tree: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/140704477/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/179223743/10002 Reduce Output Operator sort order: Map-reduce partition columns: @@ -250,9 +259,9 @@ type: string Needs Tagging: false Path -> Alias: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/140704477/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/179223743/10002 Path -> Partition: - file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/140704477/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/179223743/10002 Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -267,7 +276,7 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 Reduce Operator Tree: @@ -275,7 +284,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/757864097/10000 + directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/642786358/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -288,7 +297,7 @@ bucket_count -1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 @@ -297,7 +306,7 @@ Move Operator tables: replace: true - source: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/757864097/10000 + source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/642786358/10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -311,10 +320,10 @@ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest_j1 + location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest_j1 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/757864097/10001 + tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/642786358/10001 query: INSERT OVERWRITE TABLE dest_j1 @@ -327,7 +336,7 @@ Output: default/dest_j1 query: select * from dest_j1 x order by x.key Input: default/dest_j1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1298491747/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1506563354/10000 128 val_128 val_128 128 val_128 val_128 128 val_128 val_128 Index: ql/src/test/results/clientpositive/groupby5_map.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby5_map.q.out (revision 801363) +++ ql/src/test/results/clientpositive/groupby5_map.q.out (working copy) @@ -13,6 +13,8 @@ Map Reduce Alias -> Map Operator Tree: src + TableScan + alias: src Select Operator expressions: expr: key @@ -70,5 +72,5 @@ Output: default/dest1 query: SELECT dest1.* FROM dest1 Input: default/dest1 -Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/777816939/10000 +Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/136910930/10000 130091 Index: ql/src/test/results/compiler/plan/join2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join2.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/join2.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/2135515220/10000 + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2128820424/10000 @@ -83,7 +83,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -93,7 +93,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/2135515220/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2128820424/10001 @@ -173,7 +173,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -202,6 +202,9 @@ _col0 + + src1 + @@ -259,6 +262,9 @@ _col0 + + src1 + @@ -300,6 +306,9 @@ _col2 + + src2 + @@ -488,6 +497,9 @@ value + + src3 + @@ -521,6 +533,9 @@ key + + src3 + @@ -657,6 +672,13 @@ + + + + src3 + + + @@ -711,7 +733,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1642606416/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1350964634/10002 $INTNAME @@ -719,7 +741,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src3 @@ -731,7 +753,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1642606416/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1350964634/10002 @@ -765,7 +787,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -809,7 +831,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/2135515220/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2128820424/10000 @@ -863,6 +885,9 @@ _col5 + + src3 + @@ -874,6 +899,9 @@ _col2 + + src1 + @@ -952,6 +980,9 @@ VALUE._col1 + + src3 + @@ -963,6 +994,9 @@ VALUE._col2 + + src1 + @@ -1185,7 +1219,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -1253,7 +1287,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -1282,6 +1316,9 @@ key + + src2 + @@ -1298,6 +1335,9 @@ key + + src2 + @@ -1424,6 +1464,13 @@ + + + + src2 + + + @@ -1476,6 +1523,9 @@ key + + src1 + @@ -1492,6 +1542,9 @@ key + + src1 + @@ -1615,6 +1668,13 @@ + + + + src1 + + + @@ -1666,7 +1726,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src2 @@ -1681,7 +1741,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -1718,7 +1778,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1642606416/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1350964634/10002 @@ -1747,6 +1807,9 @@ VALUE._col0 + + src2 + @@ -1758,6 +1821,9 @@ VALUE._col0 + + src1 + Index: ql/src/test/results/compiler/plan/input2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input2.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/input2.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/1213900208/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1901996899/10000 @@ -83,7 +83,7 @@ location - 
file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -93,7 +93,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1213900208/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1901996899/10001 @@ -121,10 +121,10 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10006 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1213900208/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1901996899/10000 @@ -142,7 +142,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10006 @@ -238,6 +238,9 @@ key + + + @@ -252,6 +255,9 @@ value + + + @@ -345,10 +351,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10006 - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10006 @@ -357,7 +363,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10006 @@ -375,7 +381,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1213900208/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1901996899/10000 @@ -424,7 +430,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -460,6 +466,9 @@ VALUE + + + @@ -523,7 +532,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10006 @@ -573,7 +582,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/1213900208/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1901996899/10002 @@ -626,7 +635,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest2 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest2 @@ -636,7 +645,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1213900208/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1901996899/10003 @@ -664,10 +673,10 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10007 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1213900208/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1901996899/10002 @@ -685,7 +694,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10007 @@ -777,6 +786,9 @@ key + + + @@ -787,6 +799,9 @@ value + + + @@ -880,10 +895,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10007 - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10007 @@ -892,7 +907,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10007 @@ -910,7 +925,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1213900208/10002 + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1901996899/10002 @@ -959,7 +974,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest2 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest2 file.outputformat @@ -995,6 +1010,9 @@ VALUE + + + @@ -1058,7 +1076,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10007 @@ -1117,7 +1135,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/1213900208/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1901996899/10004 @@ -1170,7 +1188,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest3 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest3 @@ -1180,7 +1198,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1213900208/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1901996899/10005 @@ -1208,10 +1226,10 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10008 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1213900208/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1901996899/10004 @@ -1229,7 +1247,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10008 @@ -1321,6 +1339,9 @@ key + + + @@ -1331,6 +1352,9 @@ value + + + @@ -1424,10 +1448,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10008 - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10008 @@ -1436,7 +1460,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10008 @@ -1454,7 +1478,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1213900208/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1901996899/10004 @@ -1507,7 +1531,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest3 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest3 file.outputformat @@ -1543,6 +1567,9 @@ VALUE + + + @@ -1610,7 +1637,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10008 @@ -1698,7 +1725,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -1733,7 +1760,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10006 @@ -1762,6 +1789,9 @@ value + + src + @@ -1773,6 +1803,9 @@ key + + src + @@ -1890,6 +1923,9 @@ key + + src + @@ -2011,7 +2047,7 @@ 2 - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10007 @@ -2040,6 +2076,9 @@ value + + src + @@ -2051,6 +2090,9 @@ key + + src + @@ -2185,6 +2227,9 @@ key + + src + @@ -2289,6 +2334,9 @@ key + + src + @@ -2395,7 +2443,7 @@ 3 - file:/data/users/emil/hive1/hive1/build/ql/tmp/767211150/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/184329433/10008 
@@ -2435,6 +2483,9 @@ key + + src + @@ -2549,6 +2600,9 @@ key + + src + @@ -2631,6 +2685,13 @@ + + + + src + + + @@ -2655,7 +2716,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -2667,7 +2728,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Index: ql/src/test/results/compiler/plan/join3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join3.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/join3.q.xml (working copy) @@ -26,7 +26,7 @@ true - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/tmp/1833177717/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1166257470/10000 @@ -79,7 +79,7 @@ location - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -89,7 +89,7 @@ - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/tmp/1833177717/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1166257470/10001 @@ -162,7 +162,7 @@ location - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -230,7 +230,7 @@ location - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -298,7 +298,7 @@ location - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -331,6 +331,9 @@ key + + src2 + @@ -442,6 +445,13 @@ + + + + src2 + + + @@ -494,6 +504,9 @@ value + + src3 + @@ -510,6 +523,9 @@ key + + src3 + @@ -636,6 +652,13 @@ + + + + src3 + + + @@ -691,6 +714,9 @@ key + + src1 + @@ -707,6 +733,9 @@ key + + src1 + @@ -830,6 +859,13 @@ + + + + src1 + + + @@ -881,7 +917,7 @@ - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src2 @@ -899,7 +935,7 @@ - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -943,7 +979,7 @@ 1 - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/tmp/1833177717/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1166257470/10000 @@ -997,6 +1033,9 @@ _col5 + + src3 + @@ -1008,6 +1047,9 @@ _col0 + + src1 + @@ -1086,6 +1128,9 @@ VALUE._col1 + + src3 + @@ -1097,6 +1142,9 @@ VALUE._col0 + + src1 + Index: ql/src/test/results/compiler/plan/input3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input3.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/input3.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10000 @@ -83,7 +83,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/dest1 + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -93,7 +93,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10001 @@ -121,10 +121,10 @@ true - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10007 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10000 @@ -142,7 +142,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10007 @@ -238,6 +238,9 @@ key + + + @@ -252,6 +255,9 @@ value + + + @@ -345,10 +351,10 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10007 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10007 @@ -357,7 +363,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10007 @@ -375,7 +381,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10000 @@ -424,7 +430,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -460,6 +466,9 @@ VALUE + + + @@ -523,7 +532,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10007 @@ -573,7 +582,7 @@ true - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10002 @@ -626,7 +635,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/dest2 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest2 @@ -636,7 +645,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10003 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10003 @@ -664,10 +673,10 @@ true - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10008 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10002 @@ -685,7 +694,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10008 @@ -777,6 +786,9 @@ key + + + @@ -787,6 +799,9 @@ value + + + @@ -880,10 +895,10 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10008 - 
file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10008 @@ -892,7 +907,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10008 @@ -910,7 +925,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10002 @@ -959,7 +974,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/dest2 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest2 file.outputformat @@ -995,6 +1010,9 @@ VALUE + + + @@ -1058,7 +1076,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10008 @@ -1117,7 +1135,7 @@ true - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10004 @@ -1170,7 +1188,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/dest3 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest3 @@ -1180,7 +1198,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10005 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10005 @@ -1208,10 +1226,10 @@ true - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10009 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10009 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10004 @@ -1229,7 +1247,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10009 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10009 @@ -1321,6 +1339,9 @@ key + + + @@ -1331,6 +1352,9 @@ value + + + @@ -1424,10 +1448,10 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10009 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10009 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10009 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10009 @@ -1436,7 +1460,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10009 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10009 @@ -1454,7 +1478,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10004 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10004 @@ -1507,7 +1531,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/dest3 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest3 file.outputformat @@ -1543,6 +1567,9 @@ VALUE + + + @@ -1610,7 +1637,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10009 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10009 @@ -1663,7 +1690,7 @@ true - 
file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10006 ../../../../build/contrib/hive/ql/test/data/warehouse/dest4.out @@ -1694,10 +1721,10 @@ true - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10010 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10010 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10006 @@ -1715,7 +1742,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10010 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10010 @@ -1804,6 +1831,9 @@ _col0 + + + @@ -1887,10 +1917,10 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10010 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10010 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10010 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10010 @@ -1899,7 +1929,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10010 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10010 @@ -1943,7 +1973,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2027608798/10006 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/754238577/10006 @@ -1997,6 +2027,9 @@ VALUE + + + @@ -2050,7 +2083,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10010 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10010 @@ -2138,7 +2171,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -2173,7 +2206,7 @@ 1 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10007 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10007 @@ -2202,6 +2235,9 @@ value + + src + @@ -2213,6 +2249,9 @@ key + + src + @@ -2330,6 +2369,9 @@ key + + src + @@ -2451,7 +2493,7 @@ 2 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10008 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10008 @@ -2480,6 +2522,9 @@ value + + src + @@ -2491,6 +2536,9 @@ key + + src + @@ -2625,6 +2673,9 @@ key + + src + @@ -2729,6 +2780,9 @@ key + + src + @@ -2835,7 +2889,7 @@ 3 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10009 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10009 @@ -2875,6 +2929,9 @@ key + + src + @@ -3009,6 +3066,9 @@ key + + src + @@ -3113,6 +3173,9 @@ key + + src + @@ -3219,7 +3282,7 @@ 4 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1737903815/10010 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1613627590/10010 @@ -3248,6 +3311,9 @@ value + + src + @@ -3346,6 +3412,9 @@ key + + src + @@ -3428,6 +3497,13 @@ + + + + src + + + @@ -3452,7 +3528,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -3464,7 +3540,7 @@ - 
file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Index: ql/src/test/results/compiler/plan/join4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join4.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/join4.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -132,7 +132,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -351,6 +351,9 @@ value + + src1 + @@ -362,6 +365,9 @@ key + + src1 + @@ -496,6 +502,9 @@ key + + src1 + @@ -612,6 +621,9 @@ key + + src1 + @@ -762,6 +774,9 @@ key + + src1 + @@ -842,6 +857,9 @@ key + + src1 + @@ -927,6 +945,13 @@ + + + + src1 + + + @@ -1150,6 +1175,9 @@ value + + src2 + @@ -1161,6 +1189,9 @@ key + + src2 + @@ -1295,6 +1326,9 @@ key + + src2 + @@ -1399,6 +1433,9 @@ key + + src2 + @@ -1549,6 +1586,9 @@ key + + src2 + @@ -1629,6 +1669,9 @@ key + + src2 + @@ -1714,6 +1757,13 @@ + + + + src2 + + + @@ -1747,7 +1797,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src c:a:src1 @@ -1762,7 +1812,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -1807,7 +1857,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1749859558/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1108512498/10001 @@ -2050,6 +2100,9 @@ _col3 + + b + @@ -2061,6 +2114,9 @@ _col2 + + b + @@ -2072,6 +2128,9 @@ _col1 + + a + @@ -2083,6 +2142,9 @@ _col0 + + a + @@ -2193,6 +2255,9 @@ VALUE._col1 + + b + @@ -2204,6 +2269,9 @@ VALUE._col0 + + b + @@ -2215,6 +2283,9 @@ VALUE._col1 + + a + @@ -2226,6 +2297,9 @@ VALUE._col0 + + a + Index: ql/src/test/results/compiler/plan/input4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input4.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/input4.q.xml (working copy) @@ -26,7 +26,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/360715545/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1347989740/10000 @@ -79,7 +79,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -89,7 +89,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/360715545/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1347989740/10001 @@ -162,7 +162,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -466,6 +466,9 @@ value + + src + @@ -477,6 +480,9 @@ key + + src + @@ -547,6 +553,13 @@ + + + + src + + + @@ -598,7 +611,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src tmap:src @@ -610,7 +623,7 @@ - 
file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -658,7 +671,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/360715545/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1347989740/10000 @@ -712,6 +725,9 @@ _col1 + + tmap + @@ -723,6 +739,9 @@ _col0 + + tmap + @@ -837,6 +856,9 @@ _col0 + + tmap + @@ -959,6 +981,9 @@ VALUE + + + Index: ql/src/test/results/compiler/plan/join5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join5.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/join5.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -132,7 +132,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -351,6 +351,9 @@ value + + src1 + @@ -362,6 +365,9 @@ key + + src1 + @@ -496,6 +502,9 @@ key + + src1 + @@ -612,6 +621,9 @@ key + + src1 + @@ -762,6 +774,9 @@ key + + src1 + @@ -842,6 +857,9 @@ key + + src1 + @@ -927,6 +945,13 @@ + + + + src1 + + + @@ -1150,6 +1175,9 @@ value + + src2 + @@ -1161,6 +1189,9 @@ key + + src2 + @@ -1295,6 +1326,9 @@ key + + src2 + @@ -1399,6 +1433,9 @@ key + + src2 + @@ -1549,6 +1586,9 @@ key + + src2 + @@ -1629,6 +1669,9 @@ key + + src2 + @@ -1714,6 +1757,13 @@ + + + + src2 + + + @@ -1747,7 +1797,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src c:a:src1 @@ -1762,7 +1812,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -1807,7 +1857,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/730885145/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/908646882/10001 @@ -2050,6 +2100,9 @@ _col3 + + b + @@ -2061,6 +2114,9 @@ _col2 + + b + @@ -2072,6 +2128,9 @@ _col1 + + a + @@ -2083,6 +2142,9 @@ _col0 + + a + @@ -2193,6 +2255,9 @@ VALUE._col1 + + b + @@ -2204,6 +2269,9 @@ VALUE._col0 + + b + @@ -2215,6 +2283,9 @@ VALUE._col1 + + a + @@ -2226,6 +2297,9 @@ VALUE._col0 + + a + Index: ql/src/test/results/compiler/plan/input5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input5.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/input5.q.xml (working copy) @@ -26,7 +26,7 @@ true - file:/data/users/zshao/tools/deploy-trunk-apache-hive/.ptest_3/build/ql/tmp/129277062/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1695012277/10000 @@ -79,7 +79,7 @@ location - file:/data/users/zshao/tools/deploy-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -89,7 +89,7 @@ - file:/data/users/zshao/tools/deploy-trunk-apache-hive/.ptest_3/build/ql/tmp/129277062/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1695012277/10001 @@ -166,7 +166,7 @@ location - 
file:/data/users/zshao/tools/deploy-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src_thrift + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src_thrift @@ -470,6 +470,9 @@ lintstring + + src_thrift + @@ -485,6 +488,9 @@ lint + + src_thrift + @@ -563,6 +569,13 @@ + + + + src_thrift + + + @@ -665,7 +678,7 @@ - file:/data/users/zshao/tools/deploy-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src_thrift + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src_thrift tmap:src_thrift @@ -677,7 +690,7 @@ - file:/data/users/zshao/tools/deploy-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src_thrift + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src_thrift @@ -721,7 +734,7 @@ 1 - file:/data/users/zshao/tools/deploy-trunk-apache-hive/.ptest_3/build/ql/tmp/129277062/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1695012277/10000 @@ -775,6 +788,9 @@ _col1 + + tmap + @@ -786,6 +802,9 @@ _col0 + + tmap + @@ -863,6 +882,9 @@ VALUE + + + Index: ql/src/test/results/compiler/plan/join6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join6.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/join6.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -132,7 +132,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -351,6 +351,9 @@ value + + src1 + @@ -362,6 +365,9 @@ key + + src1 + @@ -496,6 +502,9 @@ key + + src1 + @@ -612,6 +621,9 @@ key + + src1 + @@ -762,6 +774,9 @@ key + + src1 + @@ -842,6 +857,9 @@ key + + src1 + @@ -927,6 +945,13 @@ + + + + src1 + + + @@ -1150,6 +1175,9 @@ value + + src2 + @@ -1161,6 +1189,9 @@ key + + src2 + @@ -1295,6 +1326,9 @@ key + + src2 + @@ -1399,6 +1433,9 @@ key + + src2 + @@ -1549,6 +1586,9 @@ key + + src2 + @@ -1629,6 +1669,9 @@ key + + src2 + @@ -1714,6 +1757,13 @@ + + + + src2 + + + @@ -1747,7 +1797,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src c:a:src1 @@ -1762,7 +1812,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -1807,7 +1857,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/694636114/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/518885693/10001 @@ -2050,6 +2100,9 @@ _col3 + + b + @@ -2061,6 +2114,9 @@ _col2 + + b + @@ -2072,6 +2128,9 @@ _col1 + + a + @@ -2083,6 +2142,9 @@ _col0 + + a + @@ -2193,6 +2255,9 @@ VALUE._col1 + + b + @@ -2204,6 +2269,9 @@ VALUE._col0 + + b + @@ -2215,6 +2283,9 @@ VALUE._col1 + + a + @@ -2226,6 +2297,9 @@ VALUE._col0 + + a + Index: ql/src/test/results/compiler/plan/input_testxpath2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testxpath2.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/input_testxpath2.q.xml (working copy) @@ -68,7 +68,7 @@ location - 
file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src_thrift + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src_thrift @@ -104,7 +104,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1477107408/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2063274133/10001 @@ -202,6 +202,9 @@ mstringstring + + src_thrift + @@ -238,6 +241,9 @@ lintstring + + src_thrift + @@ -267,6 +273,9 @@ lint + + src_thrift + @@ -398,6 +407,9 @@ lint + + src_thrift + @@ -443,6 +455,9 @@ mstringstring + + src_thrift + @@ -584,6 +599,9 @@ lint + + src_thrift + @@ -618,6 +636,9 @@ mstringstring + + src_thrift + @@ -673,6 +694,13 @@ + + + + src_thrift + + + @@ -700,7 +728,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src_thrift + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src_thrift src_thrift @@ -712,7 +740,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src_thrift + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src_thrift Index: ql/src/test/results/compiler/plan/input6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input6.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/input6.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/1302875053/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/4331220/10000 @@ -83,7 +83,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -93,7 +93,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1302875053/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/4331220/10001 @@ -121,10 +121,10 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/833154369/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1927961056/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1302875053/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/4331220/10000 @@ -142,7 +142,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/833154369/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1927961056/10002 @@ -238,6 +238,9 @@ key + + + @@ -252,6 +255,9 @@ value + + + @@ -345,10 +351,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/833154369/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1927961056/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/833154369/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1927961056/10002 @@ -357,7 +363,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/833154369/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1927961056/10002 @@ -375,7 +381,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1302875053/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/4331220/10000 @@ -424,7 +430,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -460,6 +466,9 @@ VALUE + + + @@ -523,7 +532,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/833154369/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1927961056/10002 @@ 
-611,7 +620,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src1 @@ -650,7 +659,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/833154369/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1927961056/10002 @@ -679,6 +688,9 @@ value + + src1 + @@ -690,6 +702,9 @@ key + + src1 + @@ -771,6 +786,9 @@ key + + src1 + @@ -842,6 +860,9 @@ key + + src1 + @@ -877,6 +898,13 @@ + + + + src1 + + + @@ -901,7 +929,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src1 src1 @@ -913,7 +941,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src1 Index: ql/src/test/results/compiler/plan/join7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join7.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/join7.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -132,7 +132,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -200,7 +200,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -419,6 +419,9 @@ value + + src1 + @@ -430,6 +433,9 @@ key + + src1 + @@ -564,6 +570,9 @@ key + + src1 + @@ -680,6 +689,9 @@ key + + src1 + @@ -830,6 +842,9 @@ key + + src1 + @@ -910,6 +925,9 @@ key + + src1 + @@ -995,6 +1013,13 @@ + + + + src1 + + + @@ -1218,6 +1243,9 @@ value + + src2 + @@ -1229,6 +1257,9 @@ key + + src2 + @@ -1363,6 +1394,9 @@ key + + src2 + @@ -1467,6 +1501,9 @@ key + + src2 + @@ -1617,6 +1654,9 @@ key + + src2 + @@ -1697,6 +1737,9 @@ key + + src2 + @@ -1782,6 +1825,13 @@ + + + + src2 + + + @@ -2005,6 +2055,9 @@ value + + src3 + @@ -2016,6 +2069,9 @@ key + + src3 + @@ -2150,6 +2206,9 @@ key + + src3 + @@ -2254,6 +2313,9 @@ key + + src3 + @@ -2404,6 +2466,9 @@ key + + src3 + @@ -2484,6 +2549,9 @@ key + + src3 + @@ -2569,6 +2637,13 @@ + + + + src3 + + + @@ -2602,7 +2677,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src c:a:src1 @@ -2620,7 +2695,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -2665,7 +2740,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/625369050/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/179247082/10001 @@ -2982,6 +3057,9 @@ _col5 + + c + @@ -2993,6 +3071,9 @@ _col4 + + c + @@ -3004,6 +3085,9 @@ _col3 + + b + @@ -3015,6 +3099,9 @@ _col2 + + b + @@ -3026,6 +3113,9 @@ _col1 + + a + @@ -3037,6 +3127,9 @@ _col0 + + a + @@ -3179,6 +3272,9 @@ VALUE._col1 + + c + @@ -3190,6 +3286,9 @@ VALUE._col0 + + c + @@ -3201,6 +3300,9 @@ VALUE._col1 + + b + @@ -3212,6 +3314,9 @@ VALUE._col0 + + b + @@ -3223,6 
+3328,9 @@ VALUE._col1 + + a + @@ -3234,6 +3342,9 @@ VALUE._col0 + + a + Index: ql/src/test/results/compiler/plan/input7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input7.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/input7.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/1947439546/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/596164267/10000 @@ -83,7 +83,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -93,7 +93,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1947439546/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/596164267/10001 @@ -121,10 +121,10 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/1658664569/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1045855320/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1947439546/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/596164267/10000 @@ -142,7 +142,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1658664569/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1045855320/10002 @@ -238,6 +238,9 @@ key + + + @@ -252,6 +255,9 @@ value + + + @@ -345,10 +351,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1658664569/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1045855320/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1658664569/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1045855320/10002 @@ -357,7 +363,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1658664569/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1045855320/10002 @@ -375,7 +381,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1947439546/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/596164267/10000 @@ -424,7 +430,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -460,6 +466,9 @@ VALUE + + + @@ -527,7 +536,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1658664569/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1045855320/10002 @@ -615,7 +624,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src1 @@ -646,7 +655,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1658664569/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1045855320/10002 @@ -675,6 +684,9 @@ key + + src1 + @@ -753,6 +765,13 @@ + + + + src1 + + + @@ -795,7 +814,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src1 src1 @@ -807,7 +826,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src1 Index: ql/src/test/results/compiler/plan/input8.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input8.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/input8.q.xml (working copy) @@ -64,7 +64,7 @@ location - 
file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src1 @@ -92,7 +92,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1657019249/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/114537537/10001 @@ -268,6 +268,9 @@ key + + src1 + @@ -464,6 +467,13 @@ + + + + src1 + + + @@ -506,7 +516,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src1 src1 @@ -518,7 +528,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src1 Index: ql/src/test/results/compiler/plan/join8.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join8.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/join8.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -132,7 +132,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -351,6 +351,9 @@ value + + src1 + @@ -362,6 +365,9 @@ key + + src1 + @@ -496,6 +502,9 @@ key + + src1 + @@ -612,6 +621,9 @@ key + + src1 + @@ -762,6 +774,9 @@ key + + src1 + @@ -842,6 +857,9 @@ key + + src1 + @@ -927,6 +945,13 @@ + + + + src1 + + + @@ -1150,6 +1175,9 @@ value + + src2 + @@ -1161,6 +1189,9 @@ key + + src2 + @@ -1295,6 +1326,9 @@ key + + src2 + @@ -1399,6 +1433,9 @@ key + + src2 + @@ -1549,6 +1586,9 @@ key + + src2 + @@ -1629,6 +1669,9 @@ key + + src2 + @@ -1714,6 +1757,13 @@ + + + + src2 + + + @@ -1747,7 +1797,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src c:a:src1 @@ -1762,7 +1812,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -1811,7 +1861,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1490081547/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/825921920/10001 @@ -2195,6 +2245,9 @@ _col3 + + b + @@ -2206,6 +2259,9 @@ _col2 + + b + @@ -2217,6 +2273,9 @@ _col1 + + a + @@ -2228,6 +2287,9 @@ _col0 + + a + @@ -2310,6 +2372,9 @@ VALUE._col1 + + b + @@ -2321,6 +2386,9 @@ VALUE._col0 + + b + @@ -2332,6 +2400,9 @@ VALUE._col1 + + a + @@ -2343,6 +2414,9 @@ VALUE._col0 + + a + Index: ql/src/test/results/compiler/plan/input_testsequencefile.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testsequencefile.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/input_testsequencefile.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/2123516814/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1429814076/10000 @@ -83,7 +83,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest4_sequencefile + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest4_sequencefile @@ -93,7 +93,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/2123516814/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1429814076/10001 @@ -121,10 +121,10 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/2065861925/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1266180825/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/2123516814/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1429814076/10000 @@ -142,7 +142,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/2065861925/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1266180825/10002 @@ -238,6 +238,9 @@ key + + + @@ -252,6 +255,9 @@ value + + + @@ -345,10 +351,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/2065861925/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1266180825/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/2065861925/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1266180825/10002 @@ -357,7 +363,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/2065861925/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1266180825/10002 @@ -375,7 +381,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/2123516814/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1429814076/10000 @@ -424,7 +430,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest4_sequencefile + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest4_sequencefile file.outputformat @@ -460,6 +466,9 @@ VALUE + + + @@ -523,7 +532,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/2065861925/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1266180825/10002 @@ -611,7 +620,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -642,7 +651,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/2065861925/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1266180825/10002 @@ -671,6 +680,9 @@ value + + src + @@ -682,6 +694,9 @@ key + + src + @@ -752,6 +767,13 @@ + + + + src + + + @@ -797,7 +819,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -809,7 +831,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Index: ql/src/test/results/compiler/plan/union.q.xml =================================================================== --- ql/src/test/results/compiler/plan/union.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/union.q.xml (working copy) @@ -33,7 +33,7 @@ true - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/363277583/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/629731800/10000 ../build/ql/test/data/warehouse/union.out @@ -64,10 +64,10 @@ true - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1576819664/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1806940690/10001 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/363277583/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/629731800/10000 @@ 
-85,7 +85,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1576819664/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1806940690/10001 @@ -181,6 +181,9 @@ _col0 + + + @@ -195,6 +198,9 @@ _col1 + + + @@ -288,10 +294,10 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1576819664/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1806940690/10001 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1576819664/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1806940690/10001 @@ -300,7 +306,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1576819664/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1806940690/10001 @@ -344,7 +350,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/363277583/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/629731800/10000 @@ -398,6 +404,9 @@ VALUE + + + @@ -461,7 +470,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1576819664/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1806940690/10001 @@ -549,7 +558,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -617,7 +626,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -664,7 +673,7 @@ 1 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1576819664/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1806940690/10001 @@ -693,6 +702,9 @@ _col1 + + src + @@ -704,6 +716,9 @@ _col0 + + src + @@ -802,6 +817,9 @@ value + + src + @@ -813,6 +831,9 @@ key + + src + @@ -902,6 +923,9 @@ key + + src + @@ -1012,6 +1036,9 @@ key + + src + @@ -1080,6 +1107,13 @@ + + + + src + + + @@ -1216,6 +1250,9 @@ value + + src + @@ -1227,6 +1264,9 @@ key + + src + @@ -1327,6 +1367,9 @@ key + + src + @@ -1457,6 +1500,9 @@ key + + src + @@ -1532,6 +1578,13 @@ + + + + src + + + @@ -1560,7 +1613,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src null-subquery1:unioninput-subquery1:src @@ -1575,7 +1628,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Index: ql/src/test/results/compiler/plan/input9.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input9.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/input9.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/789789200/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1598259567/10000 @@ -83,7 +83,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -93,7 +93,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/789789200/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1598259567/10001 @@ -121,10 +121,10 @@ true - 
file:/data/users/emil/hive1/hive1/build/ql/tmp/1105660904/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2075389576/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/789789200/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1598259567/10000 @@ -142,7 +142,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1105660904/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2075389576/10002 @@ -238,6 +238,9 @@ key + + + @@ -252,6 +255,9 @@ value + + + @@ -345,10 +351,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1105660904/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2075389576/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1105660904/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2075389576/10002 @@ -357,7 +363,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1105660904/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2075389576/10002 @@ -375,7 +381,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/789789200/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1598259567/10000 @@ -424,7 +430,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -460,6 +466,9 @@ VALUE + + + @@ -527,7 +536,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1105660904/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2075389576/10002 @@ -615,7 +624,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src1 @@ -654,7 +663,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1105660904/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2075389576/10002 @@ -683,6 +692,9 @@ key + + src1 + @@ -908,6 +920,13 @@ + + + + src1 + + + @@ -929,7 +948,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src1 src1 @@ -941,7 +960,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src1 Index: ql/src/test/results/compiler/plan/udf1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf1.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/udf1.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -100,7 +100,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1572884373/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1014883785/10001 @@ -1600,6 +1600,9 @@ key + + src + @@ -1738,6 +1741,9 @@ key + + src + @@ -1813,6 +1819,13 @@ + + + + src + + + @@ -1834,7 +1847,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -1846,7 +1859,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Index: 
ql/src/test/results/compiler/plan/udf4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf4.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/udf4.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -92,7 +92,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1880463950/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/553818733/10001 @@ -1568,6 +1568,13 @@ + + + + dest1 + + + @@ -1610,7 +1617,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 dest1 @@ -1622,7 +1629,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 Index: ql/src/test/results/compiler/plan/input_testxpath.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testxpath.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/input_testxpath.q.xml (working copy) @@ -68,7 +68,7 @@ location - file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src_thrift + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src_thrift @@ -96,7 +96,7 @@ - file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/.ptest_3/build/ql/tmp/312015303/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2102365199/10001 @@ -198,6 +198,9 @@ mstringstring + + src_thrift + @@ -242,6 +245,9 @@ lintstring + + src_thrift + @@ -292,6 +298,9 @@ lint + + src_thrift + @@ -402,6 +411,13 @@ + + + + src_thrift + + + @@ -494,7 +510,7 @@ - file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src_thrift + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src_thrift src_thrift @@ -506,7 +522,7 @@ - file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src_thrift + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src_thrift Index: ql/src/test/results/compiler/plan/udf6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf6.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/udf6.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -92,7 +92,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/523579366/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/16617222/10001 @@ -339,6 +339,13 @@ + + + + src + + + @@ -377,7 +384,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -389,7 +396,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Index: ql/src/test/results/compiler/plan/input_part1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_part1.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/input_part1.q.xml (working copy) @@ -77,7 +77,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart @@ -113,7 +113,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1116969381/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/247376983/10001 @@ -217,6 +217,9 @@ ds + + srcpart + @@ -228,6 +231,9 @@ hr + + srcpart + @@ -239,6 +245,9 @@ value + + srcpart + @@ -250,6 +259,9 @@ key + + srcpart + @@ -436,6 +448,9 @@ key + + srcpart + @@ -535,6 +550,9 @@ ds + + srcpart + @@ -595,6 +613,9 @@ hr + + srcpart + @@ -738,6 +759,9 @@ key + + srcpart + @@ -808,6 +832,9 @@ ds + + srcpart + @@ -858,6 +885,9 @@ hr + + srcpart + @@ -913,6 +943,13 @@ + + + + srcpart + + + @@ -943,7 +980,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 srcpart @@ -955,7 +992,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Index: ql/src/test/results/compiler/plan/groupby1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby1.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/groupby1.q.xml (working copy) @@ -26,7 +26,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/1224102099/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/347896575/10000 @@ -79,7 +79,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -89,7 +89,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1224102099/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/347896575/10001 @@ -162,7 +162,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -199,6 +199,9 @@ _col0 + + + @@ -278,6 +281,9 @@ _col1 + + + @@ -367,6 +373,9 @@ key + + src + @@ -418,6 +427,9 @@ value + + src + @@ -525,6 +537,9 @@ key + + src + @@ -535,6 +550,9 @@ value + + src + @@ -596,6 +614,13 @@ + + + + src + + + @@ -626,7 +651,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -638,7 +663,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -682,7 +707,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1224102099/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/347896575/10000 @@ -736,6 +761,9 @@ _col1 + + + @@ -747,6 +775,9 @@ _col0 + + + Index: ql/src/test/results/compiler/plan/groupby2.q.xml 
=================================================================== --- ql/src/test/results/compiler/plan/groupby2.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/groupby2.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -101,6 +101,9 @@ _col0 + + + @@ -116,6 +119,9 @@ _col1 + + + @@ -204,6 +210,9 @@ _col2 + + + @@ -218,6 +227,9 @@ _col3 + + + @@ -347,6 +359,9 @@ value + + src + @@ -405,6 +420,9 @@ key + + src + @@ -489,6 +507,9 @@ value + + src + @@ -556,6 +577,9 @@ value + + src + @@ -688,6 +712,9 @@ key + + src + @@ -698,6 +725,9 @@ value + + src + @@ -759,6 +789,13 @@ + + + + src + + + @@ -789,7 +826,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -801,7 +838,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -842,7 +879,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1735787192/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1399819330/10001 @@ -952,6 +989,9 @@ _col0 + + + @@ -979,6 +1019,9 @@ _col2 + + + @@ -1010,6 +1053,9 @@ _col1 + + + @@ -1021,6 +1067,9 @@ _col0 + + + Index: ql/src/test/results/compiler/plan/udf_case.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf_case.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/udf_case.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -96,7 +96,7 @@ - file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/.ptest_3/build/ql/tmp/2137070158/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1476498286/10001 @@ -404,6 +404,13 @@ + + + + src + + + @@ -446,7 +453,7 @@ - file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -458,7 +465,7 @@ - file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Index: ql/src/test/results/compiler/plan/subq.q.xml =================================================================== --- ql/src/test/results/compiler/plan/subq.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/subq.q.xml (working copy) @@ -33,7 +33,7 @@ true - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1569526871/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1508087850/10000 ../build/ql/test/data/warehouse/union.out @@ -64,10 +64,10 @@ true - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2055534581/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1592604126/10001 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1569526871/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1508087850/10000 @@ 
-85,7 +85,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2055534581/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1592604126/10001 @@ -181,6 +181,9 @@ _col0 + + + @@ -195,6 +198,9 @@ _col1 + + + @@ -288,10 +294,10 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2055534581/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1592604126/10001 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2055534581/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1592604126/10001 @@ -300,7 +306,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2055534581/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1592604126/10001 @@ -344,7 +350,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1569526871/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1508087850/10000 @@ -398,6 +404,9 @@ VALUE + + + @@ -461,7 +470,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2055534581/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1592604126/10001 @@ -549,7 +558,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -592,7 +601,7 @@ 1 - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/2055534581/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1592604126/10001 @@ -621,6 +630,9 @@ _col1 + + src + @@ -632,6 +644,9 @@ _col0 + + src + @@ -713,6 +728,9 @@ value + + src + @@ -724,6 +742,9 @@ key + + src + @@ -841,6 +862,9 @@ key + + src + @@ -979,6 +1003,9 @@ key + + src + @@ -1054,6 +1081,13 @@ + + + + src + + + @@ -1078,7 +1112,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src unioninput:src @@ -1090,7 +1124,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Index: ql/src/test/results/compiler/plan/groupby3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby3.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/groupby3.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -101,6 +101,9 @@ _col0 + + + @@ -192,6 +195,9 @@ _col1 + + + @@ -206,6 +212,9 @@ _col2 + + + @@ -216,6 +225,9 @@ _col3 + + + @@ -226,6 +238,9 @@ _col4 + + + @@ -236,6 +251,9 @@ _col5 + + + @@ -381,6 +399,9 @@ value + + src + @@ -456,6 +477,9 @@ value + + src + @@ -523,6 +547,9 @@ value + + src + @@ -593,6 +620,9 @@ value + + src + @@ -664,6 +694,9 @@ value + + src + @@ -735,6 +768,9 @@ value + + src + @@ -890,6 +926,9 @@ value + + src + @@ -938,6 +977,13 @@ + + + + src + + + @@ -979,7 +1025,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -991,7 +1037,7 @@ - 
file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -1032,7 +1078,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/523008270/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/471792069/10001 @@ -1142,6 +1188,9 @@ _col4 + + + @@ -1153,6 +1202,9 @@ _col3 + + + @@ -1164,6 +1216,9 @@ _col2 + + + @@ -1175,6 +1230,9 @@ _col1 + + + @@ -1186,6 +1244,9 @@ _col0 + + + Index: ql/src/test/results/compiler/plan/groupby4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby4.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/groupby4.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -101,6 +101,9 @@ _col0 + + + @@ -263,6 +266,9 @@ key + + src + @@ -368,6 +374,9 @@ key + + src + @@ -416,6 +425,13 @@ + + + + src + + + @@ -457,7 +473,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -469,7 +485,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -510,7 +526,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/207061942/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1227320587/10001 @@ -580,6 +596,9 @@ _col0 + + + Index: ql/src/test/results/compiler/plan/groupby5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby5.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/groupby5.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -101,6 +101,9 @@ _col0 + + + @@ -180,6 +183,9 @@ _col1 + + + @@ -269,6 +275,9 @@ key + + src + @@ -320,6 +329,9 @@ value + + src + @@ -427,6 +439,9 @@ key + + src + @@ -437,6 +452,9 @@ value + + src + @@ -498,6 +516,13 @@ + + + + src + + + @@ -528,7 +553,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -540,7 +565,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -581,7 +606,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1424480424/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/38674669/10001 @@ -661,6 +686,9 @@ _col1 + + + @@ -672,6 +700,9 @@ _col0 + + + Index: ql/src/test/results/compiler/plan/groupby6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby6.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/groupby6.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -101,6 +101,9 @@ _col0 + + + @@ -263,6 +266,9 @@ value + + src + @@ -368,6 +374,9 @@ value + + src + @@ -416,6 +425,13 @@ + + + + src + + + @@ -457,7 +473,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -469,7 +485,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -510,7 +526,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/485561416/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/596704722/10001 @@ -580,6 +596,9 @@ _col0 + + + Index: ql/src/test/results/compiler/plan/case_sensitivity.q.xml =================================================================== --- ql/src/test/results/compiler/plan/case_sensitivity.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/case_sensitivity.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/1671718418/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/473117287/10000 @@ -83,7 +83,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -93,7 +93,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1671718418/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/473117287/10001 @@ -121,10 +121,10 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/254688401/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1404753300/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1671718418/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/473117287/10000 @@ -142,7 +142,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/254688401/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1404753300/10002 @@ -238,6 +238,9 @@ key + + + @@ -252,6 +255,9 @@ value + + + @@ -345,10 +351,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/254688401/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1404753300/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/254688401/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1404753300/10002 @@ -357,7 +363,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/254688401/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1404753300/10002 @@ -375,7 +381,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1671718418/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/473117287/10000 @@ -424,7 +430,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -460,6 +466,9 @@ VALUE + + + @@ -527,7 +536,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/254688401/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1404753300/10002 @@ -619,7 +628,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src_thrift + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src_thrift @@ -658,7 +667,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/254688401/10002 + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1404753300/10002 @@ -693,6 +702,9 @@ lintstring + + src_thrift + @@ -743,6 +755,9 @@ lint + + src_thrift + @@ -868,6 +883,9 @@ lint + + src_thrift + @@ -1030,6 +1048,9 @@ lint + + src_thrift + @@ -1095,6 +1116,13 @@ + + + + src_thrift + + + @@ -1119,7 +1147,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src_thrift + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src_thrift src_thrift @@ -1131,7 +1159,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src_thrift + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src_thrift Index: ql/src/test/results/compiler/plan/udf_when.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf_when.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/udf_when.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -96,7 +96,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/811829169/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1521735447/10001 @@ -548,6 +548,13 @@ + + + + src + + + @@ -590,7 +597,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -602,7 +609,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Index: ql/src/test/results/compiler/plan/input20.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input20.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/input20.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -409,6 +409,9 @@ key + + src + @@ -522,6 +525,9 @@ key + + src + @@ -649,6 +655,13 @@ + + + + src + + + @@ -697,7 +710,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src tmap:src @@ -709,7 +722,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -754,7 +767,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1131703957/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1928492642/10001 @@ -930,6 +943,9 @@ _col1 + + tmap + @@ -941,6 +957,9 @@ _col0 + + tmap + @@ -1018,6 +1037,9 @@ VALUE + + + Index: ql/src/test/results/compiler/plan/sample1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample1.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/sample1.q.xml (working copy) @@ -77,7 +77,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/srcpart + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart @@ -113,7 +113,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1155441569/10001 
+ file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/422992274/10001 @@ -217,6 +217,9 @@ hr + + s + @@ -228,6 +231,9 @@ ds + + s + @@ -239,6 +245,9 @@ value + + s + @@ -250,6 +259,9 @@ key + + s + @@ -402,6 +414,9 @@ ds + + s + @@ -456,6 +471,9 @@ hr + + s + @@ -554,6 +572,9 @@ + + true + @@ -731,6 +752,13 @@ + + + + s + + + @@ -761,7 +789,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 s @@ -773,7 +801,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Index: ql/src/test/results/compiler/plan/sample2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample2.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/sample2.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/544836648/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/192860769/10000 @@ -83,7 +83,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -93,7 +93,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/544836648/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/192860769/10001 @@ -121,10 +121,10 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/475857052/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/419510544/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/544836648/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/192860769/10000 @@ -142,7 +142,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/475857052/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/419510544/10002 @@ -238,6 +238,9 @@ key + + + @@ -252,6 +255,9 @@ value + + + @@ -345,10 +351,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/475857052/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/419510544/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/475857052/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/419510544/10002 @@ -357,7 +363,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/475857052/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/419510544/10002 @@ -375,7 +381,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/544836648/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/192860769/10000 @@ -424,7 +430,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -460,6 +466,9 @@ VALUE + + + @@ -527,7 +536,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/475857052/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/419510544/10002 @@ -619,7 +628,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket @@ -658,7 +667,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/475857052/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/419510544/10002 @@ -687,6 
+696,9 @@ value + + s + @@ -698,6 +710,9 @@ key + + s + @@ -773,6 +788,9 @@ + + true + @@ -842,6 +860,9 @@ key + + s + @@ -975,6 +996,9 @@ key + + s + @@ -1091,6 +1115,13 @@ + + + + s + + + @@ -1115,7 +1146,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt s @@ -1127,7 +1158,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Index: ql/src/test/results/compiler/plan/sample3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample3.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/sample3.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/1202686717/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/708382098/10000 @@ -83,7 +83,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -93,7 +93,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1202686717/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/708382098/10001 @@ -121,10 +121,10 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/401269254/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1149510127/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1202686717/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/708382098/10000 @@ -142,7 +142,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/401269254/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1149510127/10002 @@ -238,6 +238,9 @@ key + + + @@ -252,6 +255,9 @@ value + + + @@ -345,10 +351,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/401269254/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1149510127/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/401269254/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1149510127/10002 @@ -357,7 +363,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/401269254/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1149510127/10002 @@ -375,7 +381,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1202686717/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/708382098/10000 @@ -424,7 +430,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -460,6 +466,9 @@ VALUE + + + @@ -527,7 +536,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/401269254/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1149510127/10002 @@ -619,7 +628,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket @@ -658,7 +667,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/401269254/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1149510127/10002 @@ -687,6 +696,9 @@ value + + s + @@ -698,6 +710,9 @@ key + + s + @@ -773,6 +788,9 @@ + + true + @@ -842,6 +860,9 @@ key + + s + @@ -852,6 +873,9 @@ value + + s + @@ -985,6 +1009,9 @@ key + + s + @@ -995,6 
+1022,9 @@ value + + s + @@ -1111,6 +1141,13 @@ + + + + s + + + @@ -1135,7 +1172,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket s @@ -1147,7 +1184,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket Index: ql/src/test/results/compiler/plan/sample4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample4.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/sample4.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/178303953/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1362557855/10000 @@ -83,7 +83,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -93,7 +93,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/178303953/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1362557855/10001 @@ -121,10 +121,10 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/1277439507/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1786739434/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/178303953/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1362557855/10000 @@ -142,7 +142,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1277439507/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1786739434/10002 @@ -238,6 +238,9 @@ key + + + @@ -252,6 +255,9 @@ value + + + @@ -345,10 +351,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1277439507/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1786739434/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1277439507/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1786739434/10002 @@ -357,7 +363,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1277439507/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1786739434/10002 @@ -375,7 +381,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/178303953/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1362557855/10000 @@ -424,7 +430,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -460,6 +466,9 @@ VALUE + + + @@ -527,7 +536,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1277439507/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1786739434/10002 @@ -619,7 +628,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket @@ -658,7 +667,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1277439507/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1786739434/10002 @@ -687,6 +696,9 @@ value + + s + @@ -698,6 +710,9 @@ key + + s + @@ -773,6 +788,9 @@ + + true + @@ -842,6 +860,9 @@ key + + s + @@ -975,6 +996,9 @@ key + + s + @@ -1091,6 +1115,13 @@ + + + + s + + + @@ -1115,7 +1146,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt s @@ -1127,7 +1158,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Index: ql/src/test/results/compiler/plan/sample5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample5.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/sample5.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/1582733254/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1908916346/10000 @@ -83,7 +83,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -93,7 +93,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1582733254/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1908916346/10001 @@ -121,10 +121,10 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/1888189813/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/936833563/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1582733254/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1908916346/10000 @@ -142,7 +142,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1888189813/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/936833563/10002 @@ -238,6 +238,9 @@ key + + + @@ -252,6 +255,9 @@ value + + + @@ -345,10 +351,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1888189813/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/936833563/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1888189813/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/936833563/10002 @@ -357,7 +363,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1888189813/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/936833563/10002 @@ -375,7 +381,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1582733254/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1908916346/10000 @@ -424,7 +430,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -460,6 +466,9 @@ VALUE + + + @@ -527,7 +536,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1888189813/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/936833563/10002 @@ -619,7 +628,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket @@ -658,7 +667,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1888189813/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/936833563/10002 @@ -687,6 +696,9 @@ value + + s + @@ -698,6 +710,9 @@ key + + s + @@ -773,6 +788,9 @@ + + true + @@ -842,6 +860,9 @@ key + + s + @@ -975,6 +996,9 @@ key + + s + @@ -1091,6 +1115,13 @@ + + + + s + + + @@ -1115,7 +1146,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket s @@ -1127,7 +1158,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket Index: ql/src/test/results/compiler/plan/sample6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample6.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/sample6.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/4477686/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1211971134/10000 @@ -83,7 +83,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -93,7 +93,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/4477686/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1211971134/10001 @@ -121,10 +121,10 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/1401174564/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/125395821/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/4477686/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1211971134/10000 @@ -142,7 +142,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1401174564/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/125395821/10002 @@ -238,6 +238,9 @@ key + + + @@ -252,6 +255,9 @@ value + + + @@ -345,10 +351,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1401174564/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/125395821/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1401174564/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/125395821/10002 @@ -357,7 +363,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1401174564/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/125395821/10002 @@ -375,7 +381,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/4477686/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1211971134/10000 @@ -424,7 +430,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -460,6 +466,9 @@ VALUE + + + @@ -527,7 +536,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1401174564/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/125395821/10002 @@ -619,7 +628,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket @@ -658,7 +667,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1401174564/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/125395821/10002 @@ -687,6 +696,9 @@ value + + s + @@ -698,6 +710,9 @@ key + + s + @@ -773,6 +788,9 @@ + + true + @@ -842,6 +860,9 @@ key + + s + @@ -975,6 +996,9 @@ key + + s + @@ -1091,6 +1115,13 @@ + + + + s + + + @@ -1115,7 +1146,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt s @@ -1127,7 +1158,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Index: ql/src/test/results/compiler/plan/sample7.q.xml 
=================================================================== --- ql/src/test/results/compiler/plan/sample7.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/sample7.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/871673641/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/600451135/10000 @@ -83,7 +83,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -93,7 +93,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/871673641/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/600451135/10001 @@ -121,10 +121,10 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/449091123/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1919018855/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/871673641/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/600451135/10000 @@ -142,7 +142,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/449091123/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1919018855/10002 @@ -238,6 +238,9 @@ key + + + @@ -252,6 +255,9 @@ value + + + @@ -345,10 +351,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/449091123/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1919018855/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/449091123/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1919018855/10002 @@ -357,7 +363,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/449091123/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1919018855/10002 @@ -375,7 +381,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/871673641/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/600451135/10000 @@ -424,7 +430,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -460,6 +466,9 @@ VALUE + + + @@ -527,7 +536,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/449091123/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1919018855/10002 @@ -619,7 +628,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket @@ -662,7 +671,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/449091123/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1919018855/10002 @@ -691,6 +700,9 @@ value + + s + @@ -702,6 +714,9 @@ key + + s + @@ -802,6 +817,9 @@ key + + s + @@ -874,6 +892,9 @@ + + true + @@ -943,6 +964,9 @@ key + + s + @@ -1092,6 +1116,9 @@ key + + s + @@ -1182,6 +1209,9 @@ key + + s + @@ -1237,6 +1267,13 @@ + + + + s + + + @@ -1261,7 +1298,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt s @@ -1273,7 +1310,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt Index: ql/src/test/results/compiler/plan/cast1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/cast1.q.xml (revision 801363) 
+++ ql/src/test/results/compiler/plan/cast1.q.xml (working copy) @@ -64,7 +64,7 @@ location - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -100,7 +100,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/811031924/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1706769894/10001 @@ -837,6 +837,9 @@ key + + src + @@ -971,6 +974,9 @@ key + + src + @@ -1046,6 +1052,13 @@ + + + + src + + + @@ -1067,7 +1080,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -1079,7 +1092,7 @@ - file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Index: ql/src/test/results/compiler/plan/join1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join1.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/join1.q.xml (working copy) @@ -26,7 +26,7 @@ true - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/tmp/972415285/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1981920029/10000 @@ -79,7 +79,7 @@ location - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -89,7 +89,7 @@ - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/tmp/972415285/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1981920029/10001 @@ -162,7 +162,7 @@ location - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -230,7 +230,7 @@ location - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -259,6 +259,9 @@ value + + src2 + @@ -279,6 +282,9 @@ key + + src2 + @@ -405,6 +411,13 @@ + + + + src2 + + + @@ -460,6 +473,9 @@ key + + src1 + @@ -476,6 +492,9 @@ key + + src1 + @@ -599,6 +618,13 @@ + + + + src1 + + + @@ -650,7 +676,7 @@ - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src2 @@ -665,7 +691,7 @@ - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -709,7 +735,7 @@ 1 - file:/data/users/zshao/tools/522-trunk-apache-hive/build/ql/tmp/972415285/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1981920029/10000 @@ -763,6 +789,9 @@ _col3 + + src2 + @@ -774,6 +803,9 @@ _col0 + + src1 + @@ -852,6 +884,9 @@ VALUE._col1 + + src2 + @@ -863,6 +898,9 @@ VALUE._col0 + + src1 + Index: ql/src/test/results/compiler/plan/input1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input1.q.xml (revision 801363) +++ ql/src/test/results/compiler/plan/input1.q.xml (working copy) @@ -30,7 +30,7 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/6003728/10000 + 
file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/574595116/10000 @@ -83,7 +83,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 @@ -93,7 +93,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/6003728/10001 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/574595116/10001 @@ -121,10 +121,10 @@ true - file:/data/users/emil/hive1/hive1/build/ql/tmp/1631050253/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2068381100/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/6003728/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/574595116/10000 @@ -142,7 +142,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1631050253/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2068381100/10002 @@ -238,6 +238,9 @@ key + + + @@ -252,6 +255,9 @@ value + + + @@ -345,10 +351,10 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1631050253/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2068381100/10002 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1631050253/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2068381100/10002 @@ -357,7 +363,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1631050253/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2068381100/10002 @@ -375,7 +381,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/6003728/10000 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/574595116/10000 @@ -424,7 +430,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1 file.outputformat @@ -460,6 +466,9 @@ VALUE + + + @@ -523,7 +532,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/tmp/1631050253/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2068381100/10002 @@ -611,7 +620,7 @@ location - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src @@ -650,7 +659,7 @@ 1 - file:/data/users/emil/hive1/hive1/build/ql/tmp/1631050253/10002 + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2068381100/10002 @@ -679,6 +688,9 @@ value + + src + @@ -690,6 +702,9 @@ key + + src + @@ -804,6 +819,9 @@ key + + src + @@ -942,6 +960,9 @@ key + + src + @@ -1017,6 +1038,13 @@ + + + + src + + + @@ -1041,7 +1069,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src src @@ -1053,7 +1081,7 @@ - file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src + file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java =================================================================== --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (revision 801363) +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (working copy) @@ -155,7 +155,7 @@ private filterDesc getTestFilterDesc(String column) { ArrayList children1 = new ArrayList(); - children1.add(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, column)); + children1.add(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, column, "", false)); exprNodeDesc lhs = new exprNodeFuncDesc( Constants.DOUBLE_TYPE_NAME, 
TypeInfoFactory.doubleTypeInfo, @@ -183,7 +183,7 @@ FunctionRegistry.getUDFMethod("<", TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.doubleTypeInfo), children3); - return new filterDesc(desc); + return new filterDesc(desc, false); } @SuppressWarnings("unchecked") @@ -325,7 +325,7 @@ new exprNodeFieldDesc(TypeInfoFactory.stringTypeInfo, new exprNodeColumnDesc(TypeInfoFactory.getListTypeInfo( TypeInfoFactory.stringTypeInfo), - Utilities.ReduceField.VALUE.toString()), + Utilities.ReduceField.VALUE.toString(), "", false), "0", false)), outputColumns), op4); @@ -374,7 +374,7 @@ } public static exprNodeColumnDesc getStringColumn(String columnName) { - return new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName); + return new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName, "", false); } @SuppressWarnings("unchecked") Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java =================================================================== --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (revision 801363) +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (working copy) @@ -98,7 +98,7 @@ public void testExprNodeColumnEvaluator() throws Throwable { try { // get a evaluator for a simple field expression - exprNodeDesc exprDesc = new exprNodeColumnDesc(colaType, "cola"); + exprNodeDesc exprDesc = new exprNodeColumnDesc(colaType, "cola", "", false); ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(exprDesc); // evaluate on row @@ -128,8 +128,8 @@ public void testExprNodeFuncEvaluator() throws Throwable { try { // get a evaluator for a string concatenation expression - exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1"); - exprNodeDesc coladesc = new exprNodeColumnDesc(colaType, "cola"); + exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1", "", false); + exprNodeDesc coladesc = new exprNodeColumnDesc(colaType, "cola", "", false); exprNodeDesc col11desc = getListIndexNode(col1desc, 1); exprNodeDesc cola0desc = getListIndexNode(coladesc, 0); exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", col11desc, cola0desc); @@ -150,7 +150,7 @@ public void testExprNodeConversionEvaluator() throws Throwable { try { // get a evaluator for a string concatenation expression - exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1"); + exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1", "", false); exprNodeDesc col11desc = getListIndexNode(col1desc, 1); exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(Constants.DOUBLE_TYPE_NAME, col11desc); ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1); @@ -253,8 +253,8 @@ basetimes * 10, ExprNodeEvaluatorFactory.get( TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", - getListIndexNode(new exprNodeColumnDesc(col1Type, "col1"), constant1), - getListIndexNode(new exprNodeColumnDesc(colaType, "cola"), constant1))), + getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant1), + getListIndexNode(new exprNodeColumnDesc(colaType, "cola", "", false), constant1))), r, "1b"); measureSpeed("concat(concat(col1[1], cola[1]), col1[2])", @@ -262,9 +262,9 @@ ExprNodeEvaluatorFactory.get( TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", - getListIndexNode(new exprNodeColumnDesc(col1Type, "col1"), constant1), - 
getListIndexNode(new exprNodeColumnDesc(colaType, "cola"), constant1)), - getListIndexNode(new exprNodeColumnDesc(col1Type, "col1"), constant2))), + getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant1), + getListIndexNode(new exprNodeColumnDesc(colaType, "cola", "", false), constant1)), + getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant2))), r, "1b2"); measureSpeed("concat(concat(concat(col1[1], cola[1]), col1[2]), cola[2])", @@ -273,10 +273,10 @@ TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", - getListIndexNode(new exprNodeColumnDesc(col1Type, "col1"), constant1), - getListIndexNode(new exprNodeColumnDesc(colaType, "cola"), constant1)), - getListIndexNode(new exprNodeColumnDesc(col1Type, "col1"), constant2)), - getListIndexNode(new exprNodeColumnDesc(colaType, "cola"), constant2))), + getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant1), + getListIndexNode(new exprNodeColumnDesc(colaType, "cola", "", false), constant1)), + getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant2)), + getListIndexNode(new exprNodeColumnDesc(colaType, "cola", "", false), constant2))), r, "1b2c"); Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java =================================================================== --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (revision 801363) +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (working copy) @@ -43,11 +43,11 @@ try { // initialize a complete map reduce configuration - exprNodeDesc expr1 = new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, F1); - exprNodeDesc expr2 = new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, F2); + exprNodeDesc expr1 = new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, F1, "", false); + exprNodeDesc expr2 = new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, F2, "", false); exprNodeDesc filterExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", expr1, expr2); - filterDesc filterCtx = new filterDesc(filterExpr); + filterDesc filterCtx = new filterDesc(filterExpr, false); Operator op = OperatorFactory.get(filterDesc.class); op.setConf(filterCtx); Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java =================================================================== --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (revision 801363) +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (working copy) @@ -90,7 +90,7 @@ exprNodeDesc func2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", col0, zero); exprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("&&", func1, func2); assert(func3 != null); - filterDesc filterCtx = new filterDesc(func3); + filterDesc filterCtx = new filterDesc(func3, false); // Configuration Operator op = OperatorFactory.get(filterDesc.class); Index: ql/src/test/queries/clientpositive/transform_ppr2.q =================================================================== --- ql/src/test/queries/clientpositive/transform_ppr2.q (revision 0) +++ ql/src/test/queries/clientpositive/transform_ppr2.q (revision 0) @@ -0,0 +1,21 @@ +set hive.optimize.ppd=true; + +EXPLAIN EXTENDED +FROM ( + FROM srcpart src + SELECT TRANSFORM(src.ds, src.key, src.value) + USING '/bin/cat' AS (ds, tkey, 
tvalue) + WHERE src.ds = '2008-04-08' + CLUSTER BY tkey +) tmap +SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100; + +FROM ( + FROM srcpart src + SELECT TRANSFORM(src.ds, src.key, src.value) + USING '/bin/cat' AS (ds, tkey, tvalue) + WHERE src.ds = '2008-04-08' + CLUSTER BY tkey +) tmap +SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100; + Index: ql/src/test/queries/clientpositive/join_map_ppr.q =================================================================== --- ql/src/test/queries/clientpositive/join_map_ppr.q (revision 0) +++ ql/src/test/queries/clientpositive/join_map_ppr.q (revision 0) @@ -0,0 +1,19 @@ +CREATE TABLE dest_j1(key STRING, value STRING, val2 STRING) STORED AS TEXTFILE; + +EXPLAIN EXTENDED +INSERT OVERWRITE TABLE dest_j1 +SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value +FROM src1 x JOIN src y ON (x.key = y.key) +JOIN srcpart z ON (x.key = z.key) +WHERE z.ds='2008-04-08' and z.hr=11; + +INSERT OVERWRITE TABLE dest_j1 +SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value +FROM src1 x JOIN src y ON (x.key = y.key) +JOIN srcpart z ON (x.key = z.key) +WHERE z.ds='2008-04-08' and z.hr=11; + +select * from dest_j1 x order by x.key; + +drop table dest_j1; + Index: ql/src/test/queries/clientpositive/union_ppr.q =================================================================== --- ql/src/test/queries/clientpositive/union_ppr.q (revision 0) +++ ql/src/test/queries/clientpositive/union_ppr.q (revision 0) @@ -0,0 +1,16 @@ +EXPLAIN EXTENDED +SELECT * FROM ( + SELECT X.* FROM SRCPART X WHERE X.key < 100 + UNION ALL + SELECT Y.* FROM SRCPART Y WHERE Y.key < 100 +) A +WHERE A.ds = '2008-04-08' +SORT BY A.key; + +SELECT * FROM ( + SELECT X.* FROM SRCPART X WHERE X.key < 100 + UNION ALL + SELECT Y.* FROM SRCPART Y WHERE Y.key < 100 +) A +WHERE A.ds = '2008-04-08' +SORT BY A.key; Index: ql/src/test/queries/clientpositive/groupby_ppr.q =================================================================== --- ql/src/test/queries/clientpositive/groupby_ppr.q (revision 0) +++ ql/src/test/queries/clientpositive/groupby_ppr.q (revision 0) @@ -0,0 +1,19 @@ +set hive.map.aggr=false; +set hive.groupby.skewindata=false; + +CREATE TABLE dest1(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE; + +EXPLAIN EXTENDED +FROM srcpart src +INSERT OVERWRITE TABLE dest1 +SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) +WHERE src.ds = '2008-04-08' +GROUP BY substr(src.key,1,1); + +FROM srcpart src +INSERT OVERWRITE TABLE dest1 +SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) +WHERE src.ds = '2008-04-08' +GROUP BY substr(src.key,1,1); + +SELECT dest1.* FROM dest1; Index: ql/src/test/queries/clientpositive/outer_join_ppr.q =================================================================== --- ql/src/test/queries/clientpositive/outer_join_ppr.q (revision 0) +++ ql/src/test/queries/clientpositive/outer_join_ppr.q (revision 0) @@ -0,0 +1,35 @@ +set hive.optimize.ppd=true; + +EXPLAIN EXTENDED + FROM + src a + FULL OUTER JOIN + srcpart b + ON (a.key = b.key AND b.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25; + + FROM + src a + FULL OUTER JOIN + srcpart b + ON (a.key = b.key AND b.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25; + +EXPLAIN EXTENDED + FROM + src a + FULL OUTER JOIN + srcpart b + ON (a.key = b.key) + SELECT 
a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'; + + FROM + src a + FULL OUTER JOIN + srcpart b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'; Index: ql/src/test/queries/clientpositive/groupby_map_ppr.q =================================================================== --- ql/src/test/queries/clientpositive/groupby_map_ppr.q (revision 0) +++ ql/src/test/queries/clientpositive/groupby_map_ppr.q (revision 0) @@ -0,0 +1,20 @@ +set hive.map.aggr=true; +set hive.groupby.skewindata=false; +set mapred.reduce.tasks=31; + +CREATE TABLE dest1(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE; + +EXPLAIN EXTENDED +FROM srcpart src +INSERT OVERWRITE TABLE dest1 +SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) +WHERE src.ds = '2008-04-08' +GROUP BY substr(src.key,1,1); + +FROM srcpart src +INSERT OVERWRITE TABLE dest1 +SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) +WHERE src.ds = '2008-04-08' +GROUP BY substr(src.key,1,1); + +SELECT dest1.* FROM dest1; Index: ql/src/test/queries/clientpositive/louter_join_ppr.q =================================================================== --- ql/src/test/queries/clientpositive/louter_join_ppr.q (revision 0) +++ ql/src/test/queries/clientpositive/louter_join_ppr.q (revision 0) @@ -0,0 +1,71 @@ +set hive.optimize.ppd=true; + +EXPLAIN EXTENDED + FROM + src a + LEFT OUTER JOIN + srcpart b + ON (a.key = b.key AND b.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25; + + FROM + src a + LEFT OUTER JOIN + srcpart b + ON (a.key = b.key AND b.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25; + +EXPLAIN EXTENDED + FROM + srcpart a + LEFT OUTER JOIN + src b + ON (a.key = b.key AND a.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25; + + FROM + srcpart a + LEFT OUTER JOIN + src b + ON (a.key = b.key AND a.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25; + + +EXPLAIN EXTENDED + FROM + src a + LEFT OUTER JOIN + srcpart b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'; + + FROM + src a + LEFT OUTER JOIN + srcpart b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'; + +EXPLAIN EXTENDED + FROM + srcpart a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08'; + + FROM + srcpart a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08'; + Index: ql/src/test/queries/clientpositive/router_join_ppr.q =================================================================== --- ql/src/test/queries/clientpositive/router_join_ppr.q (revision 0) +++ ql/src/test/queries/clientpositive/router_join_ppr.q (revision 0) @@ -0,0 +1,71 @@ +set hive.optimize.ppd=true; + 
+EXPLAIN EXTENDED + FROM + src a + RIGHT OUTER JOIN + srcpart b + ON (a.key = b.key AND b.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25; + + FROM + src a + RIGHT OUTER JOIN + srcpart b + ON (a.key = b.key AND b.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25; + +EXPLAIN EXTENDED + FROM + srcpart a + RIGHT OUTER JOIN + src b + ON (a.key = b.key AND a.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25; + + FROM + srcpart a + RIGHT OUTER JOIN + src b + ON (a.key = b.key AND a.ds = '2008-04-08') + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25; + + +EXPLAIN EXTENDED + FROM + src a + RIGHT OUTER JOIN + srcpart b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'; + + FROM + src a + RIGHT OUTER JOIN + srcpart b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'; + +EXPLAIN EXTENDED + FROM + srcpart a + RIGHT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08'; + + FROM + srcpart a + RIGHT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08'; + Index: ql/src/test/queries/clientpositive/transform_ppr1.q =================================================================== --- ql/src/test/queries/clientpositive/transform_ppr1.q (revision 0) +++ ql/src/test/queries/clientpositive/transform_ppr1.q (revision 0) @@ -0,0 +1,19 @@ +set hive.optimize.ppd=true; + +EXPLAIN EXTENDED +FROM ( + FROM srcpart src + SELECT TRANSFORM(src.ds, src.key, src.value) + USING '/bin/cat' AS (ds, tkey, tvalue) + CLUSTER BY tkey +) tmap +SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 AND tmap.ds = '2008-04-08'; + +FROM ( + FROM srcpart src + SELECT TRANSFORM(src.ds, src.key, src.value) + USING '/bin/cat' AS (ds, tkey, tvalue) + CLUSTER BY tkey +) tmap +SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 AND tmap.ds = '2008-04-08'; + Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (working copy) @@ -128,7 +128,8 @@ ArrayList valueCols = new ArrayList(); for (ColumnInfo ci : fsRS.getSignature()) { - valueCols.add(new exprNodeColumnDesc(ci.getType(), ci.getInternalName())); + valueCols.add(new exprNodeColumnDesc(ci.getType(), ci.getInternalName(), ci.getTabAlias(), + ci.getIsPartitionCol())); } // create a dummy tableScan operator @@ -155,13 +156,15 @@ for(ColumnInfo colInfo: interim_rwsch.getColumnInfos()) { String [] info = interim_rwsch.reverseLookup(colInfo.getInternalName()); out_rwsch.put(info[0], info[1], - new ColumnInfo(pos.toString(), colInfo.getType())); + new ColumnInfo(pos.toString(), colInfo.getType(), info[0], + colInfo.getIsPartitionCol())); pos = Integer.valueOf(pos.intValue() + 1); } Operator extract = OperatorFactory.getAndMakeChild( - new extractDesc(new 
exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, Utilities.ReduceField.VALUE.toString())), + new extractDesc(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, + Utilities.ReduceField.VALUE.toString(), "", false)), new RowSchema(out_rwsch.getColumnInfos())); tableDesc ts = (tableDesc)fsConf.getTableInfo().clone(); Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcCtx.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcCtx.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcCtx.java (revision 0) @@ -0,0 +1,60 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.optimizer.ppr; + +import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; + +/** + * The processor context for partition pruner. This contains the table + * alias that is being currently processed. + */ +public class ExprProcCtx implements NodeProcessorCtx { + + /** + * The table alias that is being currently processed. + */ + String tabAlias; + + /** + * Flag to hold whether there are any non partition columns accessed in the + * expression. + */ + boolean hasNonPartCols; + + public ExprProcCtx(String tabAlias) { + this.tabAlias = tabAlias; + this.hasNonPartCols = false; + } + + public String getTabAlias() { + return tabAlias; + } + + public void setTabAlias(String tabAlias) { + this.tabAlias = tabAlias; + } + + public boolean getHasNonPartCols() { + return this.hasNonPartCols; + } + + public void setHasNonPartCols(boolean val) { + this.hasNonPartCols = val; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java (revision 0) @@ -0,0 +1,59 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.optimizer.ppr; + +import java.util.HashMap; + +import org.apache.hadoop.hive.ql.exec.TableScanOperator; +import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.plan.exprNodeDesc; + +/** + * Context class for operator tree walker for partition pruner. + */ +public class OpWalkerCtx implements NodeProcessorCtx { + + private boolean hasNonPartCols; + + /** + * Map from tablescan operator to partition pruning predicate + * that is initialized from the ParseContext + */ + private HashMap opToPartPruner; + + /** + * Constructor + */ + public OpWalkerCtx(HashMap opToPartPruner) { + this.opToPartPruner = opToPartPruner; + this.hasNonPartCols = false; + } + + public HashMap getOpToPartPruner() { + return this.opToPartPruner; + } + + public void addHasNonPartCols(boolean val) { + hasNonPartCols = (hasNonPartCols || val); + } + + public boolean getHasNonPartCols() { + return hasNonPartCols; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (revision 0) @@ -0,0 +1,162 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.optimizer.ppr; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.metastore.MetaStoreUtils; +import org.apache.hadoop.hive.metastore.Warehouse; +import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; +import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory; +import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; +import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; +import org.apache.hadoop.hive.ql.lib.Dispatcher; +import org.apache.hadoop.hive.ql.lib.GraphWalker; +import org.apache.hadoop.hive.ql.lib.Node; +import org.apache.hadoop.hive.ql.lib.NodeProcessor; +import org.apache.hadoop.hive.ql.lib.Rule; +import org.apache.hadoop.hive.ql.lib.RuleRegExp; +import org.apache.hadoop.hive.ql.metadata.Hive; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.optimizer.Transform; +import org.apache.hadoop.hive.ql.parse.ParseContext; +import org.apache.hadoop.hive.ql.parse.PrunedPartitionList; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.exprNodeDesc; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; + +/** + * The transformation step that does partition pruning. 
+ * + */ +public class PartitionPruner implements Transform { + + // The log + private static final Log LOG = LogFactory.getLog("hive.ql.optimizer.ppr.PartitionPruner"); + + /* (non-Javadoc) + * @see org.apache.hadoop.hive.ql.optimizer.Transform#transform(org.apache.hadoop.hive.ql.parse.ParseContext) + */ + @Override + public ParseContext transform(ParseContext pctx) throws SemanticException { + + // create the context for walking operators + OpWalkerCtx opWalkerCtx = new OpWalkerCtx(pctx.getOpToPartPruner()); + + Map opRules = new LinkedHashMap(); + opRules.put(new RuleRegExp("R1", "(TS%FIL%)|(TS%FIL%FIL%)"), + OpProcFactory.getFilterProc()); + + // The dispatcher fires the processor corresponding to the closest matching rule and passes the context along + Dispatcher disp = new DefaultRuleDispatcher(OpProcFactory.getDefaultProc(), opRules, opWalkerCtx); + GraphWalker ogw = new DefaultGraphWalker(disp); + + // Create a list of topop nodes + ArrayList topNodes = new ArrayList(); + topNodes.addAll(pctx.getTopOps().values()); + ogw.startWalking(topNodes, null); + pctx.setHasNonPartCols(opWalkerCtx.getHasNonPartCols()); + + return pctx; + } + + public static PrunedPartitionList prune(Table tab, exprNodeDesc prunerExpr) throws HiveException { + LOG.trace("Started pruning partition"); + LOG.trace("tabname = " + tab.getName()); + LOG.trace("prune Expression = " + prunerExpr); + + LinkedHashSet true_parts = new LinkedHashSet(); + LinkedHashSet unkn_parts = new LinkedHashSet(); + LinkedHashSet denied_parts = new LinkedHashSet(); + + try { + StructObjectInspector rowObjectInspector = (StructObjectInspector)tab.getDeserializer().getObjectInspector(); + Object[] rowWithPart = new Object[2]; + + if(tab.isPartitioned()) { + for(String partName: Hive.get().getPartitionNames(tab.getDbName(), tab.getName(), (short) -1)) { + // Set all the variables here + LinkedHashMap partSpec = Warehouse.makeSpecFromName(partName); + // Create the row object + ArrayList partNames = new ArrayList(); + ArrayList partValues = new ArrayList(); + ArrayList partObjectInspectors = new ArrayList(); + for(Map.Entry entry : partSpec.entrySet()) { + partNames.add(entry.getKey()); + partValues.add(entry.getValue()); + partObjectInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); + } + StructObjectInspector partObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(partNames, partObjectInspectors); + + rowWithPart[1] = partValues; + ArrayList ois = new ArrayList(2); + ois.add(rowObjectInspector); + ois.add(partObjectInspector); + StructObjectInspector rowWithPartObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(ois); + + // evaluate the expression tree + if (prunerExpr != null) { + ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(prunerExpr); + ObjectInspector evaluateResultOI = evaluator.initialize(rowWithPartObjectInspector); + Object evaluateResultO = evaluator.evaluate(rowWithPart); + Boolean r = (Boolean) ((PrimitiveObjectInspector)evaluateResultOI).getPrimitiveJavaObject(evaluateResultO); + LOG.trace("prune result for partition " + partSpec + ": " + r); + if (Boolean.FALSE.equals(r)) { + if (denied_parts.isEmpty()) { + Partition part = Hive.get().getPartition(tab, partSpec, Boolean.FALSE); + denied_parts.add(part); + } + LOG.trace("pruned partition: " + partSpec); + } else { + Partition part = Hive.get().getPartition(tab, partSpec, Boolean.FALSE); + if (Boolean.TRUE.equals(r)) { + LOG.debug("retained partition: " + partSpec); + true_parts.add(part); + }
else { + LOG.debug("unknown partition: " + partSpec); + unkn_parts.add(part); + } + } + } else { + // if there is no partition pruning, all of them are needed + true_parts.add(Hive.get().getPartition(tab, partSpec, Boolean.FALSE)); + } + } + } else { + true_parts.addAll(Hive.get().getPartitions(tab)); + } + } catch (Exception e) { + throw new HiveException(e); + } + + // Now return the set of partitions + return new PrunedPartitionList(true_parts, unkn_parts, denied_parts); + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprPrunerInfo.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprPrunerInfo.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprPrunerInfo.java (revision 0) @@ -0,0 +1,41 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.optimizer.ppr; + +import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; + +/** + * The processor context for partition pruner. This contains the table + * alias that is being currently processed. + */ +public class ExprPrunerInfo implements NodeProcessorCtx { + + /** + * The table alias that is being currently processed. + */ + String tabAlias; + + public String getTabAlias() { + return tabAlias; + } + + public void setTabAlias(String tabAlias) { + this.tabAlias = tabAlias; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java (revision 0) @@ -0,0 +1,274 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.hadoop.hive.ql.optimizer.ppr; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Stack; + +import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; +import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; +import org.apache.hadoop.hive.ql.lib.Dispatcher; +import org.apache.hadoop.hive.ql.lib.GraphWalker; +import org.apache.hadoop.hive.ql.lib.Node; +import org.apache.hadoop.hive.ql.lib.NodeProcessor; +import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.lib.Rule; +import org.apache.hadoop.hive.ql.lib.RuleRegExp; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc; +import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc; +import org.apache.hadoop.hive.ql.plan.exprNodeDesc; +import org.apache.hadoop.hive.ql.plan.exprNodeFieldDesc; +import org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc; +import org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc; +import org.apache.hadoop.hive.ql.plan.exprNodeNullDesc; +import org.apache.hadoop.hive.ql.udf.UDFOPAnd; +import org.apache.hadoop.hive.ql.udf.UDFOPOr; +import org.apache.hadoop.hive.ql.udf.UDFOPNot; +import org.apache.hadoop.hive.ql.udf.UDFType; + +/** + * Expression processor factory for partition pruning. Each processor tries + * to convert the expression subtree into a partition pruning expression. + * This expression is then used to figure out whether a particular partition + * should be scanned or not. + */ +public class ExprProcFactory { + + /** + * Processor for column expressions. + */ + public static class ColumnExprProcessor implements NodeProcessor { + + @Override + public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + exprNodeDesc newcd = null; + exprNodeColumnDesc cd = (exprNodeColumnDesc) nd; + ExprProcCtx epc = (ExprProcCtx) procCtx; + if (cd.getTabAlias().equalsIgnoreCase(epc.getTabAlias()) && cd.getIsParititonCol()) + newcd = cd.clone(); + else { + newcd = new exprNodeConstantDesc(cd.getTypeInfo(), null); + epc.setHasNonPartCols(true); + } + + return newcd; + } + + } + + /** + * Process function descriptors. + */ + public static class FuncExprProcessor implements NodeProcessor { + + @Override + public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object... 
nodeOutputs) throws SemanticException { + + exprNodeDesc newfd = null; + exprNodeFuncDesc fd = (exprNodeFuncDesc) nd; + + boolean unknown = false; + // Check if any of the children is unknown for non logical operators + if (!fd.getUDFMethod().getDeclaringClass().equals(UDFOPAnd.class) + && !fd.getUDFMethod().getDeclaringClass().equals(UDFOPOr.class) + && !fd.getUDFMethod().getDeclaringClass().equals(UDFOPNot.class)) + for(Object child: nodeOutputs) { + exprNodeDesc child_nd = (exprNodeDesc)child; + if (child_nd instanceof exprNodeConstantDesc && + ((exprNodeConstantDesc)child_nd).getValue() == null) { + unknown = true; + } + } + + if (fd.getUDFClass().getAnnotation(UDFType.class) != null && + (fd.getUDFClass().getAnnotation(UDFType.class).deterministic() == false || + unknown)) + newfd = new exprNodeConstantDesc(fd.getTypeInfo(), null); + else { + // Create the list of children + ArrayList children = new ArrayList(); + for(Object child: nodeOutputs) { + children.add((exprNodeDesc) child); + } + // Create a copy of the function descriptor + newfd = new exprNodeFuncDesc(fd.getMethodName(), + fd.getTypeInfo(), fd.getUDFClass(), + fd.getUDFMethod(), children); + } + + return newfd; + } + + } + + /** + * If all children are candidates and refer only to one table alias then this expr is a candidate + * else it is not a candidate but its children could be final candidates + */ + public static class GenericFuncExprProcessor implements NodeProcessor { + + @Override + public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + exprNodeDesc newfd = null; + exprNodeGenericFuncDesc fd = (exprNodeGenericFuncDesc) nd; + + boolean unknown = false; + // Check if any of the children is unknown + for(Object child: nodeOutputs) { + exprNodeDesc child_nd = (exprNodeDesc)child; + if (child_nd instanceof exprNodeConstantDesc && + ((exprNodeConstantDesc)child_nd).getValue() == null) { + unknown = true; + } + } + + if (unknown) + newfd = new exprNodeConstantDesc(fd.getTypeInfo(), null); + else { + // Create the list of children + ArrayList children = new ArrayList(); + for(Object child: nodeOutputs) { + children.add((exprNodeDesc) child); + } + // Create a copy of the function descriptor + newfd = new exprNodeGenericFuncDesc(fd.getTypeInfo(), fd.getGenericUDFClass(), children); + } + + return newfd; + } + + } + + public static class FieldExprProcessor implements NodeProcessor { + + @Override + public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + exprNodeFieldDesc fnd = (exprNodeFieldDesc)nd; + boolean unknown = false; + int idx = 0; + exprNodeDesc left_nd = null; + for(Object child: nodeOutputs) { + exprNodeDesc child_nd = (exprNodeDesc) child; + if (child_nd instanceof exprNodeConstantDesc && + ((exprNodeConstantDesc)child_nd).getValue() == null) + unknown = true; + left_nd = child_nd; + } + + assert(idx == 0); + + exprNodeDesc newnd = null; + if (unknown) { + newnd = new exprNodeConstantDesc(fnd.getTypeInfo(), null); + } + else { + newnd = new exprNodeFieldDesc(fnd.getTypeInfo(), left_nd, fnd.getFieldName(), fnd.getIsList()); + } + return newnd; + } + + } + + /** + * Processor for constants and null expressions. For such expressions + * the processor simply clones the exprNodeDesc and returns it. + */ + public static class DefaultExprProcessor implements NodeProcessor { + + @Override + public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object... 
nodeOutputs) throws SemanticException { + if (nd instanceof exprNodeConstantDesc) + return ((exprNodeConstantDesc)nd).clone(); + else if (nd instanceof exprNodeNullDesc) + return ((exprNodeNullDesc)nd).clone(); + + assert(false); + return null; + } + } + + public static NodeProcessor getDefaultExprProcessor() { + return new DefaultExprProcessor(); + } + + public static NodeProcessor getFuncProcessor() { + return new FuncExprProcessor(); + } + + public static NodeProcessor getGenericFuncProcessor() { + return new GenericFuncExprProcessor(); + } + + public static NodeProcessor getFieldProcessor() { + return new FieldExprProcessor(); + } + + public static NodeProcessor getColumnProcessor() { + return new ColumnExprProcessor(); + } + + /** + * Generates the partition pruning expression for the given predicate. + * @param tabAlias The table alias of the partitioned table that is being considered for pruning + * @param pred The predicate from which the partition pruning expression needs to be generated + * @param hasNonPartCols Indicates whether the predicate references a non partition column + * @return The partition pruning expression derived from the predicate + * @throws SemanticException + */ + public static exprNodeDesc genPruner(String tabAlias, exprNodeDesc pred, + boolean hasNonPartCols) throws SemanticException { + // Create the walker, the rules dispatcher and the context. + ExprProcCtx pprCtx= new ExprProcCtx(tabAlias); + + // create a walker which walks the tree in a DFS manner while maintaining the operator stack. The dispatcher + // generates the pruning expression from the expression tree + Map exprRules = new LinkedHashMap(); + exprRules.put(new RuleRegExp("R1", exprNodeColumnDesc.class.getName() + "%"), getColumnProcessor()); + exprRules.put(new RuleRegExp("R2", exprNodeFieldDesc.class.getName() + "%"), getFieldProcessor()); + exprRules.put(new RuleRegExp("R3", exprNodeFuncDesc.class.getName() + "%"), getFuncProcessor()); + exprRules.put(new RuleRegExp("R5", exprNodeGenericFuncDesc.class.getName() + "%"), getGenericFuncProcessor()); + + // The dispatcher fires the processor corresponding to the closest matching rule and passes the context along + Dispatcher disp = new DefaultRuleDispatcher(getDefaultExprProcessor(), exprRules, pprCtx); + GraphWalker egw = new DefaultGraphWalker(disp); + + List startNodes = new ArrayList(); + startNodes.add(pred); + + HashMap outputMap = new HashMap(); + egw.startWalking(startNodes, outputMap); + hasNonPartCols = pprCtx.getHasNonPartCols(); + + // Get the exprNodeDesc corresponding to the first start node; + return (exprNodeDesc)outputMap.get(pred); + } + +} Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpProcFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpProcFactory.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpProcFactory.java (revision 0) @@ -0,0 +1,138 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.optimizer.ppr; + +import java.util.Map; +import java.util.Stack; +import org.apache.hadoop.hive.ql.exec.FilterOperator; +import org.apache.hadoop.hive.ql.exec.TableScanOperator; +import org.apache.hadoop.hive.ql.lib.Node; +import org.apache.hadoop.hive.ql.lib.NodeProcessor; +import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory; +import org.apache.hadoop.hive.ql.plan.exprNodeDesc; + +/** + * Operator factory for partition pruning processing of the operator graph. + * We find all the filter operators that appear just beneath the table scan + * operators. We then pass the filter to the partition pruner to construct + * a pruner for that table alias and store a mapping from the table scan + * operator to that pruner. We call that pruner later during plan generation. + */ +public class OpProcFactory { + + /** + * Determines the partition pruner for the filter. This is called only when the filter + * follows a table scan operator. + */ + public static class FilterPPR implements NodeProcessor { + + @Override + public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + OpWalkerCtx owc = (OpWalkerCtx)procCtx; + FilterOperator fop = (FilterOperator) nd; + FilterOperator fop2 = null; + + // The stack contains either ... TS, Filter or + // ... TS, Filter, Filter with the head of the stack being the rightmost + // symbol. So we just pop out the two elements from the top and if the + // second one of them is not a table scan then the operator on the top of + // the stack is the table scan operator.
+ Node tmp = stack.pop(); + Node tmp2 = stack.pop(); + TableScanOperator top = null; + if (tmp2 instanceof TableScanOperator) { + top = (TableScanOperator)tmp2; + } + else { + top = (TableScanOperator) stack.peek(); + fop2 = (FilterOperator) tmp2; + } + stack.push(tmp2); + stack.push(tmp); + + // If fop2 exists (i.e. this is not the top level filter) and fop2 is not + // a sampling filter, then we ignore the current filter + if (fop2 != null && !fop2.getConf().getIsSamplingPred()) + return null; + + // ignore the predicate in case it is a sampling predicate + if (fop.getConf().getIsSamplingPred()) { + return null; + } + + // Otherwise this is not a sampling predicate and we need to generate the partition pruning predicate from it + exprNodeDesc predicate = fop.getConf().getPredicate(); + String alias = top.getConf().getAlias(); + + // Generate the partition pruning predicate + boolean hasNonPartCols = false; + exprNodeDesc ppr_pred = ExprProcFactory.genPruner(alias, predicate, hasNonPartCols); + owc.addHasNonPartCols(hasNonPartCols); + + // Add the pruning predicate to the table scan operator + addPruningPred(owc.getOpToPartPruner(), top, ppr_pred); + + return null; + } + + private void addPruningPred(Map opToPPR, + TableScanOperator top, exprNodeDesc new_ppr_pred) { + exprNodeDesc old_ppr_pred = opToPPR.get(top); + exprNodeDesc ppr_pred = null; + if (old_ppr_pred != null) { + // OR the old_ppr_pred and the new_ppr_pred + ppr_pred = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("OR", old_ppr_pred, new_ppr_pred); + } + else { + ppr_pred = new_ppr_pred; + } + + // Put the mapping from table scan operator to ppr_pred + opToPPR.put(top, ppr_pred); + + return; + } + } + + + /** + * Default processor which does nothing. + */ + public static class DefaultPPR implements NodeProcessor { + + @Override + public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + // Nothing needs to be done.
+ return null; + } + } + + public static NodeProcessor getFilterProc() { + return new FilterPPR(); + } + + public static NodeProcessor getDefaultProc() { + return new DefaultPPR(); + } + +} Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java (working copy) @@ -25,61 +25,56 @@ import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.ppd.PredicatePushDown; +import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner; import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcessor; /** * Implementation of the optimizer */ public class Optimizer { - private ParseContext pctx; - private List transformations; - - /** - * empty constructor - */ - public Optimizer() { - } + private ParseContext pctx; + private List transformations; /** * create the list of transformations * @param hiveConf */ - public void initialize(HiveConf hiveConf) { - transformations = new ArrayList(); - if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTCP)) { - transformations.add(new ColumnPruner()); - } + public void initialize(HiveConf hiveConf) { + transformations = new ArrayList(); + if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTCP)) { + transformations.add(new ColumnPruner()); + } if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTPPD)) { transformations.add(new PredicatePushDown()); + transformations.add(new PartitionPruner()); } transformations.add(new UnionProcessor()); - transformations.add(new MapJoinProcessor()); - } - - /** - * invoke all the transformations one-by-one, and alter the query plan - * @return ParseContext - * @throws SemanticException - */ - public ParseContext optimize() throws SemanticException { - for (Transform t : transformations) - pctx = t.transform(pctx); + transformations.add(new MapJoinProcessor()); + } + + /** + * invoke all the transformations one-by-one, and alter the query plan + * @return ParseContext + * @throws SemanticException + */ + public ParseContext optimize() throws SemanticException { + for (Transform t : transformations) + pctx = t.transform(pctx); return pctx; - } - - /** - * @return the pctx - */ - public ParseContext getPctx() { - return pctx; - } + } - /** - * @param pctx the pctx to set - */ - public void setPctx(ParseContext pctx) { - this.pctx = pctx; - } - - + /** + * @return the pctx + */ + public ParseContext getPctx() { + return pctx; + } + + /** + * @param pctx the pctx to set + */ + public void setPctx(ParseContext pctx) { + this.pctx = pctx; + } + } Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (working copy) @@ -392,29 +392,37 @@ Path tblDir = null; tableDesc tblDesc = null; - // Generate the map work for this alias_id - PartitionPruner pruner = parseCtx.getAliasToPruner().get(alias_id); - Set parts = null; + PrunedPartitionList partsList = null; + try { - // pass both confirmed and unknown partitions through the map-reduce framework - PartitionPruner.PrunedPartitionList partsList = pruner.prune(); - - parts = partsList.getConfirmedPartns(); - 
parts.addAll(partsList.getUnknownPartns()); - partitionDesc aliasPartnDesc = null; - if (parts.isEmpty()) { - if (!partsList.getDeniedPartns().isEmpty()) - aliasPartnDesc = Utilities.getPartitionDesc(partsList.getDeniedPartns().iterator().next()); + if (!opProcCtx.getConf().getBoolVar(HiveConf.ConfVars.HIVEOPTPPD)) { + partsList = parseCtx.getAliasToPruner().get(alias_id).prune(); } else { - aliasPartnDesc = Utilities.getPartitionDesc(parts.iterator().next()); + partsList = org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner.prune( + parseCtx.getTopToTable().get(topOp), + parseCtx.getOpToPartPruner().get(topOp)); } - plan.getAliasToPartnInfo().put(alias_id, aliasPartnDesc); } catch (HiveException e) { - // Has to use full name to make sure it does not conflict with org.apache.commons.lang.StringUtils LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e)); throw new SemanticException(e.getMessage(), e); } + + // Generate the map work for this alias_id + Set parts = null; + // pass both confirmed and unknown partitions through the map-reduce framework + + parts = partsList.getConfirmedPartns(); + parts.addAll(partsList.getUnknownPartns()); + partitionDesc aliasPartnDesc = null; + if (parts.isEmpty()) { + if (!partsList.getDeniedPartns().isEmpty()) + aliasPartnDesc = Utilities.getPartitionDesc(partsList.getDeniedPartns().iterator().next()); + } + else { + aliasPartnDesc = Utilities.getPartitionDesc(parts.iterator().next()); + } + plan.getAliasToPartnInfo().put(alias_id, aliasPartnDesc); SamplePruner samplePruner = parseCtx.getAliasToSamplePruner().get(alias_id); for (Partition part : parts) { Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java (working copy) @@ -178,10 +178,11 @@ String outputCol = oldValueInfo.getInternalName(); if (outputRS.get(key, field) == null) { outputColumnNames.add(outputCol); - exprNodeDesc colDesc = new exprNodeColumnDesc(valueInfo.getType(), valueInfo.getInternalName()); + exprNodeDesc colDesc = new exprNodeColumnDesc(valueInfo.getType(), valueInfo.getInternalName(), + valueInfo.getTabAlias(), valueInfo.getIsPartitionCol()); values.add(colDesc); outputRS.put(key, field, new ColumnInfo(outputCol, - valueInfo.getType())); + valueInfo.getType(), valueInfo.getTabAlias(), valueInfo.getIsPartitionCol())); colExprMap.put(outputCol, colDesc); } } @@ -283,11 +284,11 @@ String[] nm = inputRR.reverseLookup(internalName); ColumnInfo valueInfo = inputRR.get(nm[0], nm[1]); exprNodeDesc colDesc = new exprNodeColumnDesc(valueInfo.getType(), - valueInfo.getInternalName()); + valueInfo.getInternalName(), nm[0], valueInfo.getIsPartitionCol()); exprs.add(colDesc); outputs.add(internalName); outputRS .put(nm[0], nm[1], new ColumnInfo(internalName, - valueInfo.getType())); + valueInfo.getType(), nm[0], valueInfo.getIsPartitionCol())); colExprMap.put(internalName, colDesc); } Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (working copy) @@ -135,7 +135,7 @@ List> originalChilren = tsOp.getChildOperators(); tsOp.setChildOperators(null); Operator output = - 
OperatorFactory.getAndMakeChild(new filterDesc(condn), + OperatorFactory.getAndMakeChild(new filterDesc(condn, false), new RowSchema(inputRR.getColumnInfos()), tsOp); output.setChildOperators(originalChilren); Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java (working copy) @@ -37,25 +37,34 @@ private String internalName; /** - * isVirtual indicates whether the column is a virtual column or not. Virtual columns - * are the ones that are not stored in the tables. For now these are just the partitioning - * columns. + * Store the alias of the table where available. */ - private boolean isVirtual; + private String tabAlias; + /** + * Indicates whether the column is a partition column. + */ + private boolean isPartitionCol; + transient private TypeInfo type; public ColumnInfo() { } - public ColumnInfo(String internalName, TypeInfo type) { + public ColumnInfo(String internalName, TypeInfo type, + String tabAlias, boolean isPartitionCol) { this.internalName = internalName; this.type = type; + this.tabAlias = tabAlias; + this.isPartitionCol = isPartitionCol; } - public ColumnInfo(String internalName, Class type) { + public ColumnInfo(String internalName, Class type, + String tabAlias, boolean isPartitionCol) { this.internalName = internalName; this.type = TypeInfoFactory.getPrimitiveTypeInfoFromPrimitiveWritable(type); + this.tabAlias = tabAlias; + this.isPartitionCol = isPartitionCol; } public TypeInfo getType() { @@ -74,6 +83,13 @@ this.internalName = internalName; } + public String getTabAlias() { + return this.tabAlias; + } + + public boolean getIsPartitionCol() { + return this.isPartitionCol; + } /** * Returns the string representation of the ColumnInfo. */ Index: ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java (working copy) @@ -27,16 +27,36 @@ public class exprNodeColumnDesc extends exprNodeDesc implements Serializable { private static final long serialVersionUID = 1L; + + /** + * The column name. + */ private String column; + + /** + * The alias of the table. + */ + private String tabAlias; + /** + * Is the column a partitioned column. 
+ */ + private boolean isPartitionCol; + public exprNodeColumnDesc() {} - public exprNodeColumnDesc(TypeInfo typeInfo, String column) { + public exprNodeColumnDesc(TypeInfo typeInfo, String column, + String tabAlias, boolean isPartitionCol) { super(typeInfo); this.column = column; + this.tabAlias = tabAlias; + this.isPartitionCol = isPartitionCol; } - public exprNodeColumnDesc(Class c, String column) { + public exprNodeColumnDesc(Class c, String column, String tabAlias, + boolean isPartitionCol) { super(TypeInfoFactory.getPrimitiveTypeInfoFromJavaPrimitive(c)); this.column = column; + this.tabAlias = tabAlias; + this.isPartitionCol = isPartitionCol; } public String getColumn() { return this.column; @@ -45,6 +65,20 @@ this.column = column; } + public String getTabAlias() { + return this.tabAlias; + } + public void setTabAlias(String tabAlias) { + this.tabAlias = tabAlias; + } + + public boolean getIsParititonCol() { + return this.isPartitionCol; + } + public void setIsPartitionCol(boolean isPartitionCol) { + this.isPartitionCol = isPartitionCol; + } + public String toString() { return "Column[" + column + "]"; } @@ -62,7 +96,8 @@ } @Override public exprNodeDesc clone() { - return new exprNodeColumnDesc(this.typeInfo, this.column); + return new exprNodeColumnDesc(this.typeInfo, this.column, + this.tabAlias, this.isPartitionCol); } @Override public boolean isSame(Object o) { Index: ql/src/java/org/apache/hadoop/hive/ql/plan/tableScanDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/tableScanDesc.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/tableScanDesc.java (working copy) @@ -28,8 +28,22 @@ @explain(displayName="TableScan") public class tableScanDesc implements Serializable { private static final long serialVersionUID = 1L; + + private String alias; + @SuppressWarnings("nls") - public tableScanDesc() { - throw new RuntimeException("This class does not need to be instantiated"); + public tableScanDesc() { } + + public tableScanDesc(final String alias) { + this.alias = alias; } + + @explain(displayName="alias") + public String getAlias() { + return alias; + } + + public void setAlias(String alias) { + this.alias = alias; + } } Index: ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java (working copy) @@ -24,10 +24,12 @@ public class filterDesc implements Serializable { private static final long serialVersionUID = 1L; private org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate; + private boolean isSamplingPred; public filterDesc() { } public filterDesc( - final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate) { + final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate, boolean isSamplingPred) { this.predicate = predicate; + this.isSamplingPred = isSamplingPred; } @explain(displayName="predicate") public org.apache.hadoop.hive.ql.plan.exprNodeDesc getPredicate() { @@ -36,4 +38,11 @@ public void setPredicate(final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate) { this.predicate = predicate; } + @explain(displayName="isSamplingPred", normalExplain=false) + public boolean getIsSamplingPred() { + return this.isSamplingPred; + } + public void setIsSamplingPred(final boolean isSamplingPred) { + this.isSamplingPred = isSamplingPred; + } } Index: 
ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java (working copy) @@ -105,7 +105,8 @@ * @throws SemanticException */ public void walk(Node nd) throws SemanticException { - opStack.push(nd); + if (opStack.empty() || nd != opStack.peek()) + opStack.push(nd); if((nd.getChildren() == null) || getDispatchedList().containsAll(nd.getChildren())) { @@ -121,6 +122,5 @@ getToWalk().add(0, nd); getToWalk().removeAll(nd.getChildren()); getToWalk().addAll(0, nd.getChildren()); - opStack.pop(); } } Index: ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java (working copy) @@ -1,576 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.ql.parse; - -import java.util.*; - -import org.apache.hadoop.hive.metastore.MetaStoreUtils; -import org.apache.hadoop.hive.metastore.Warehouse; -import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; -import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory; -import org.apache.hadoop.hive.ql.exec.UDFArgumentException; -import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; -import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; -import org.apache.hadoop.hive.ql.metadata.Hive; -import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.metadata.Partition; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc; -import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc; -import org.apache.hadoop.hive.ql.plan.exprNodeDesc; -import org.apache.hadoop.hive.ql.plan.exprNodeFieldDesc; -import org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc; -import org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc; -import org.apache.hadoop.hive.ql.plan.exprNodeNullDesc; -import org.apache.hadoop.hive.ql.udf.UDFOPAnd; -import org.apache.hadoop.hive.ql.udf.UDFOPNot; -import org.apache.hadoop.hive.ql.udf.UDFOPOr; -import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; -import org.apache.hadoop.hive.ql.udf.UDFType; -import org.apache.hadoop.hive.serde2.SerDeException; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; -import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -public class PartitionPruner { - - // The log - @SuppressWarnings("nls") - private static final Log LOG = LogFactory.getLog("hive.ql.parse.PartitionPruner"); - - private String tableAlias; - - private QBMetaData metaData; - - private Table tab; - - private exprNodeDesc prunerExpr; - - // is set to true if the expression only contains partitioning columns and not any other column reference. - // This is used to optimize select * from table where ... scenario, when the where condition only references - // partitioning columns - the partitions are identified and streamed directly to the client without requiring - // a map-reduce job - private boolean onlyContainsPartCols; - - public PartitionPruner() { - } - - /** Creates a new instance of PartitionPruner */ - public PartitionPruner(String tableAlias, QBMetaData metaData) { - this.tableAlias = tableAlias; - this.metaData = metaData; - this.tab = metaData.getTableForAlias(tableAlias); - this.prunerExpr = null; - onlyContainsPartCols = true; - } - - public boolean onlyContainsPartitionCols() { - return onlyContainsPartCols; - } - - /** Class to store the return result of genExprNodeDesc. - * - * TODO: In the future when we refactor the PartitionPruner code, we should - * use the same code (GraphWalker) as it is now in TypeCheckProcFactory. 
- * We should use NULL to represent a table name node, and the DOT operator - * should descend into the sub tree for 2 levels in order to find out the - * table name. The benefit is that we get rid of another concept class - - * here it is ExprNodeTempDesc - the return value of a branch in the - * Expression Syntax Tree, which is different from the value of a branch in - * the Expression Evaluation Tree. - * - */ - static class ExprNodeTempDesc { - - public ExprNodeTempDesc(exprNodeDesc desc) { - isTableName = false; - this.desc = desc; - } - - public ExprNodeTempDesc(String tableName) { - isTableName = true; - this.tableName = tableName; - } - - public boolean getIsTableName() { - return isTableName; - } - - public exprNodeDesc getDesc() { - return desc; - } - - public String getTableName() { - return tableName; - } - - boolean isTableName; - exprNodeDesc desc; - String tableName; - - public String toString() { - if (isTableName) { - return "Table:" + tableName; - } else { - return "Desc: " + desc; - } - } - }; - - static ExprNodeTempDesc genSimpleExprNodeDesc(ASTNode expr) throws SemanticException { - exprNodeDesc desc = null; - switch(expr.getType()) { - case HiveParser.TOK_NULL: - desc = new exprNodeNullDesc(); - break; - case HiveParser.Identifier: - // This is the case for an XPATH element (like "c" in "a.b.c.d") - desc = new exprNodeConstantDesc( - TypeInfoFactory.stringTypeInfo, - SemanticAnalyzer.unescapeIdentifier(expr.getText())); - break; - case HiveParser.Number: - Number v = null; - try { - v = Double.valueOf(expr.getText()); - v = Long.valueOf(expr.getText()); - v = Integer.valueOf(expr.getText()); - } catch (NumberFormatException e) { - // do nothing here, we will throw an exception in the following block - } - if (v == null) { - throw new SemanticException(ErrorMsg.INVALID_NUMERICAL_CONSTANT.getMsg(expr)); - } - desc = new exprNodeConstantDesc(v); - break; - case HiveParser.StringLiteral: - desc = new exprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, BaseSemanticAnalyzer.unescapeSQLString(expr.getText())); - break; - case HiveParser.TOK_CHARSETLITERAL: - desc = new exprNodeConstantDesc(BaseSemanticAnalyzer.charSetString(expr.getChild(0).getText(), expr.getChild(1).getText())); - break; - case HiveParser.KW_TRUE: - desc = new exprNodeConstantDesc(Boolean.TRUE); - break; - case HiveParser.KW_FALSE: - desc = new exprNodeConstantDesc(Boolean.FALSE); - break; - } - return desc == null ? null : new ExprNodeTempDesc(desc); - } - - /** - * We use exprNodeConstantDesc(class,null) to represent unknown values. - * Except UDFOPAnd, UDFOPOr, and UDFOPNot, all UDFs are assumed to return unknown values - * if any of the arguments are unknown. - * - * @param expr - * @return The expression desc, will NEVER be null. - * @throws SemanticException - */ - @SuppressWarnings("nls") - private ExprNodeTempDesc genExprNodeDesc(ASTNode expr) - throws SemanticException { - // We recursively create the exprNodeDesc. Base cases: when we encounter - // a column ref, we convert that into an exprNodeColumnDesc; when we encounter - // a constant, we convert that into an exprNodeConstantDesc. For others we just - // build the exprNodeFuncDesc with recursively built children. - - // Is this a simple expr node (not a TOK_COLREF or a TOK_FUNCTION or an operator)? 
- ExprNodeTempDesc tempDesc = genSimpleExprNodeDesc(expr); - if (tempDesc != null) { - return tempDesc; - } - - int tokType = expr.getType(); - switch (tokType) { - case HiveParser.TOK_TABLE_OR_COL: { - String tableOrCol = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getText()); - - if (metaData.getAliasToTable().get(tableOrCol.toLowerCase()) != null) { - // It's a table name - tempDesc = new ExprNodeTempDesc(tableOrCol); - } else { - // It's a column - String colName = tableOrCol; - String tabAlias = SemanticAnalyzer.getTabAliasForCol(this.metaData, colName, (ASTNode)expr.getChild(0)); - LOG.debug("getTableColumnDesc(" + tabAlias + ", " + colName); - tempDesc = getTableColumnDesc(tabAlias, colName); - } - break; - } - - - default: { - - boolean isFunction = (expr.getType() == HiveParser.TOK_FUNCTION); - - // Create all children - int childrenBegin = (isFunction ? 1 : 0); - ArrayList tempChildren = new ArrayList(expr.getChildCount() - childrenBegin); - for (int ci=childrenBegin; ci children = new ArrayList(expr.getChildCount() - childrenBegin); - for (int ci=0; ci confirmedPartns; - - // unknown partitions - may/may not satisfy the partition criteria - private Set unknownPartns; - - // denied partitions - do not satisfy the partition criteria - private Set deniedPartns; - - /** - * @param confirmedPartns confirmed paritions - * @param unknownPartns unknown partitions - */ - public PrunedPartitionList(Set confirmedPartns, Set unknownPartns, Set deniedPartns) { - this.confirmedPartns = confirmedPartns; - this.unknownPartns = unknownPartns; - this.deniedPartns = deniedPartns; - } - - /** - * get confirmed partitions - * @return confirmedPartns confirmed paritions - */ - public Set getConfirmedPartns() { - return confirmedPartns; - } - - /** - * get unknown partitions - * @return unknownPartns unknown paritions - */ - public Set getUnknownPartns() { - return unknownPartns; - } - - /** - * get denied partitions - * @return deniedPartns denied paritions - */ - public Set getDeniedPartns() { - return deniedPartns; - } - - /** - * set confirmed partitions - * @param confirmedPartns confirmed paritions - */ - public void setConfirmedPartns(Set confirmedPartns) { - this.confirmedPartns = confirmedPartns; - } - - /** - * set unknown partitions - * @param unknownPartns unknown partitions - */ - public void setUnknownPartns(Set unknownPartns) { - this.unknownPartns = unknownPartns; - } - } - - /** - * From the table metadata prune the partitions to return the partitions. 
- * Evaluate the parition pruner for each partition and return confirmed and unknown partitions separately - */ - @SuppressWarnings("nls") - public PrunedPartitionList prune() throws HiveException { - LOG.trace("Started pruning partiton"); - LOG.trace("tabname = " + this.tab.getName()); - LOG.trace("prune Expression = " + this.prunerExpr); - - LinkedHashSet true_parts = new LinkedHashSet(); - LinkedHashSet unkn_parts = new LinkedHashSet(); - LinkedHashSet denied_parts = new LinkedHashSet(); - - try { - StructObjectInspector rowObjectInspector = (StructObjectInspector)this.tab.getDeserializer().getObjectInspector(); - Object[] rowWithPart = new Object[2]; - - if(tab.isPartitioned()) { - for(String partName: Hive.get().getPartitionNames(MetaStoreUtils.DEFAULT_DATABASE_NAME, tab.getName(), (short) -1)) { - // Set all the variables here - LinkedHashMap partSpec = Warehouse.makeSpecFromName(partName); - LOG.debug("part name: " + partName); - // Create the row object - ArrayList partNames = new ArrayList(); - ArrayList partValues = new ArrayList(); - ArrayList partObjectInspectors = new ArrayList(); - for(Map.Entryentry : partSpec.entrySet()) { - partNames.add(entry.getKey()); - partValues.add(entry.getValue()); - partObjectInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); - } - StructObjectInspector partObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(partNames, partObjectInspectors); - - rowWithPart[1] = partValues; - ArrayList ois = new ArrayList(2); - ois.add(rowObjectInspector); - ois.add(partObjectInspector); - StructObjectInspector rowWithPartObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(ois); - - // evaluate the expression tree - if (this.prunerExpr != null) { - ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(this.prunerExpr); - ObjectInspector evaluateResultOI = evaluator.initialize(rowWithPartObjectInspector); - Object evaluateResultO = evaluator.evaluate(rowWithPart); - Boolean r = (Boolean) ((PrimitiveObjectInspector)evaluateResultOI).getPrimitiveJavaObject(evaluateResultO); - LOG.trace("prune result for partition " + partSpec + ": " + r); - if (Boolean.FALSE.equals(r)) { - if (denied_parts.isEmpty()) { - Partition part = Hive.get().getPartition(tab, partSpec, Boolean.FALSE); - denied_parts.add(part); - } - LOG.trace("pruned partition: " + partSpec); - } else { - Partition part = Hive.get().getPartition(tab, partSpec, Boolean.FALSE); - if (Boolean.TRUE.equals(r)) { - LOG.debug("retained partition: " + partSpec); - true_parts.add(part); - } else { - LOG.debug("unknown partition: " + partSpec); - unkn_parts.add(part); - } - } - } else { - // is there is no parition pruning, all of them are needed - true_parts.add(Hive.get().getPartition(tab, partSpec, Boolean.FALSE)); - } - } - } else { - true_parts.addAll(Hive.get().getPartitions(tab)); - } - } catch (Exception e) { - throw new HiveException(e); - } - - // Now return the set of partitions - return new PrunedPartitionList(true_parts, unkn_parts, denied_parts); - } - - public Table getTable() { - return this.tab; - } -} Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ASTPartitionPruner.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/ASTPartitionPruner.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ASTPartitionPruner.java (working copy) @@ -20,16 +20,10 @@ import java.util.*; -import org.apache.hadoop.hive.metastore.MetaStoreUtils; -import 
org.apache.hadoop.hive.metastore.Warehouse; -import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; -import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; -import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc; @@ -47,16 +41,11 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.hive.ql.udf.UDFType; import org.apache.hadoop.hive.serde2.SerDeException; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; -import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -public class PartitionPruner { +public class ASTPartitionPruner { // The log @SuppressWarnings("nls") @@ -76,11 +65,11 @@ // a map-reduce job private boolean onlyContainsPartCols; - public PartitionPruner() { + public ASTPartitionPruner() { } /** Creates a new instance of PartitionPruner */ - public PartitionPruner(String tableAlias, QBMetaData metaData) { + public ASTPartitionPruner(String tableAlias, QBMetaData metaData) { this.tableAlias = tableAlias; this.metaData = metaData; this.tab = metaData.getTableForAlias(tableAlias); @@ -301,7 +290,8 @@ if (t.isPartitionKey(colName)) { // Set value to null if it's not partition column if (tabAlias.equalsIgnoreCase(tableAlias)) { - desc = new ExprNodeTempDesc(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, colName)); + desc = new ExprNodeTempDesc(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, + colName, tabAlias, true)); } else { desc = new ExprNodeTempDesc(new exprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, null)); } @@ -427,147 +417,13 @@ } } - /** - * list of the partitions satisfying the pruning criteria - contains both confirmed and unknown partitions - */ - public static class PrunedPartitionList { - // confirmed partitions - satisfy the partition criteria - private Set confirmedPartns; - - // unknown partitions - may/may not satisfy the partition criteria - private Set unknownPartns; - - // denied partitions - do not satisfy the partition criteria - private Set deniedPartns; - - /** - * @param confirmedPartns confirmed paritions - * @param unknownPartns unknown partitions - */ - public PrunedPartitionList(Set confirmedPartns, Set unknownPartns, Set deniedPartns) { - this.confirmedPartns = confirmedPartns; - this.unknownPartns = unknownPartns; - this.deniedPartns = deniedPartns; - } - - /** - * get confirmed partitions - * @return confirmedPartns confirmed paritions - */ - public Set getConfirmedPartns() { - return confirmedPartns; - } - - /** - * get unknown partitions - * @return unknownPartns unknown paritions - */ - public Set getUnknownPartns() { - return unknownPartns; - } - - /** - * get denied partitions - * @return deniedPartns denied paritions - */ - public Set getDeniedPartns() { - 
return deniedPartns; - } - - /** - * set confirmed partitions - * @param confirmedPartns confirmed paritions - */ - public void setConfirmedPartns(Set confirmedPartns) { - this.confirmedPartns = confirmedPartns; - } - - /** - * set unknown partitions - * @param unknownPartns unknown partitions - */ - public void setUnknownPartns(Set unknownPartns) { - this.unknownPartns = unknownPartns; - } - } - /** * From the table metadata prune the partitions to return the partitions. * Evaluate the parition pruner for each partition and return confirmed and unknown partitions separately */ @SuppressWarnings("nls") public PrunedPartitionList prune() throws HiveException { - LOG.trace("Started pruning partiton"); - LOG.trace("tabname = " + this.tab.getName()); - LOG.trace("prune Expression = " + this.prunerExpr); - - LinkedHashSet true_parts = new LinkedHashSet(); - LinkedHashSet unkn_parts = new LinkedHashSet(); - LinkedHashSet denied_parts = new LinkedHashSet(); - - try { - StructObjectInspector rowObjectInspector = (StructObjectInspector)this.tab.getDeserializer().getObjectInspector(); - Object[] rowWithPart = new Object[2]; - - if(tab.isPartitioned()) { - for(String partName: Hive.get().getPartitionNames(MetaStoreUtils.DEFAULT_DATABASE_NAME, tab.getName(), (short) -1)) { - // Set all the variables here - LinkedHashMap partSpec = Warehouse.makeSpecFromName(partName); - LOG.debug("part name: " + partName); - // Create the row object - ArrayList partNames = new ArrayList(); - ArrayList partValues = new ArrayList(); - ArrayList partObjectInspectors = new ArrayList(); - for(Map.Entryentry : partSpec.entrySet()) { - partNames.add(entry.getKey()); - partValues.add(entry.getValue()); - partObjectInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); - } - StructObjectInspector partObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(partNames, partObjectInspectors); - - rowWithPart[1] = partValues; - ArrayList ois = new ArrayList(2); - ois.add(rowObjectInspector); - ois.add(partObjectInspector); - StructObjectInspector rowWithPartObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(ois); - - // evaluate the expression tree - if (this.prunerExpr != null) { - ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(this.prunerExpr); - ObjectInspector evaluateResultOI = evaluator.initialize(rowWithPartObjectInspector); - Object evaluateResultO = evaluator.evaluate(rowWithPart); - Boolean r = (Boolean) ((PrimitiveObjectInspector)evaluateResultOI).getPrimitiveJavaObject(evaluateResultO); - LOG.trace("prune result for partition " + partSpec + ": " + r); - if (Boolean.FALSE.equals(r)) { - if (denied_parts.isEmpty()) { - Partition part = Hive.get().getPartition(tab, partSpec, Boolean.FALSE); - denied_parts.add(part); - } - LOG.trace("pruned partition: " + partSpec); - } else { - Partition part = Hive.get().getPartition(tab, partSpec, Boolean.FALSE); - if (Boolean.TRUE.equals(r)) { - LOG.debug("retained partition: " + partSpec); - true_parts.add(part); - } else { - LOG.debug("unknown partition: " + partSpec); - unkn_parts.add(part); - } - } - } else { - // is there is no parition pruning, all of them are needed - true_parts.add(Hive.get().getPartition(tab, partSpec, Boolean.FALSE)); - } - } - } else { - true_parts.addAll(Hive.get().getPartitions(tab)); - } - } catch (Exception e) { - throw new HiveException(e); - } - - // Now return the set of partitions - return new PrunedPartitionList(true_parts, unkn_parts, denied_parts); + return 
org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner.prune(this.tab, this.prunerExpr); } public Table getTable() { Property changes on: ql/src/java/org/apache/hadoop/hive/ql/parse/ASTPartitionPruner.java ___________________________________________________________________ Added: svn:mergeinfo Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (working copy) @@ -93,7 +93,8 @@ // If the current subExpression is pre-calculated, as in Group-By etc. ColumnInfo colInfo = input.get("", expr.toStringTree()); if (colInfo != null) { - desc = new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName()); + desc = new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(), + colInfo.getTabAlias(), colInfo.getIsPartitionCol()); return desc; } return desc; @@ -326,7 +327,8 @@ } } else { // It's a column. - return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName()); + return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(), + colInfo.getTabAlias(), colInfo.getIsPartitionCol()); } } @@ -627,7 +629,8 @@ ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(1))); return null; } - return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName()); + return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(), + colInfo.getTabAlias(), colInfo.getIsPartitionCol()); } // Return nulls for conversion operators Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (working copy) @@ -51,8 +51,6 @@ import org.apache.hadoop.hive.ql.exec.RowSchema; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; -import org.apache.hadoop.hive.ql.exec.UDAF; -import org.apache.hadoop.hive.ql.exec.UDAFEvaluator; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.HiveOutputFormat; @@ -111,7 +109,6 @@ import org.apache.hadoop.hive.ql.plan.unionDesc; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode; import org.apache.hadoop.hive.serde2.Deserializer; @@ -136,7 +133,8 @@ */ public class SemanticAnalyzer extends BaseSemanticAnalyzer { - private HashMap aliasToPruner; + private HashMap aliasToPruner; + private HashMap opToPartPruner; private HashMap aliasToSamplePruner; private HashMap> topOps; private HashMap> topSelOps; @@ -144,6 +142,7 @@ private List loadTableWork; private List loadFileWork; private Map joinContext; + private HashMap topToTable; private QB qb; private ASTNode ast; private int destTableId; @@ -168,7 +167,8 @@ super(conf); - this.aliasToPruner = new HashMap(); + this.aliasToPruner = new HashMap(); + this.opToPartPruner = new HashMap(); this.aliasToSamplePruner = new HashMap(); this.topOps = new HashMap>(); this.topSelOps = new HashMap>(); @@ -176,6 +176,7 @@ 
this.loadFileWork = new ArrayList(); opParseCtx = new LinkedHashMap, OpParseContext>(); joinContext = new HashMap(); + topToTable = new HashMap(); this.destTableId = 1; this.uCtx = null; this.listMapJoinOpsNoReducer = new ArrayList(); @@ -205,6 +206,7 @@ public void init(ParseContext pctx) { aliasToPruner = pctx.getAliasToPruner(); + opToPartPruner = pctx.getOpToPartPruner(); aliasToSamplePruner = pctx.getAliasToSamplePruner(); topOps = pctx.getTopOps(); topSelOps = pctx.getTopSelOps(); @@ -221,8 +223,8 @@ } public ParseContext getParseContext() { - return new ParseContext(conf, qb, ast, aliasToPruner, aliasToSamplePruner, topOps, - topSelOps, opParseCtx, joinContext, loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx, + return new ParseContext(conf, qb, ast, aliasToPruner, opToPartPruner, aliasToSamplePruner, topOps, + topSelOps, opParseCtx, joinContext, topToTable, loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx, listMapJoinOpsNoReducer); } @@ -232,7 +234,7 @@ assert (ast.getToken() != null); switch (ast.getToken().getType()) { - case HiveParser.TOK_QUERY: { + case HiveParser.TOK_QUERY: { QB qb = new QB(id, alias, true); doPhase1(ast, qb, initPhase1Ctx()); qbexpr.setOpcode(QBExpr.Opcode.NULLOP); @@ -603,7 +605,9 @@ for (String alias : qb.getTabAliases()) { String alias_id = (qb.getId() == null ? alias : qb.getId() + ":" + alias); - PartitionPruner pruner = new PartitionPruner(alias, qb.getMetaData()); + org.apache.hadoop.hive.ql.parse.ASTPartitionPruner pruner = + new org.apache.hadoop.hive.ql.parse.ASTPartitionPruner(alias, qb.getMetaData()); + // Pass each where clause to the pruner for(String clause: qbp.getClauseNames()) { @@ -622,7 +626,8 @@ for (String alias : qb.getQbJoinTree().getBaseSrc()) { if (alias != null) { String alias_id = (qb.getId() == null ? alias : qb.getId() + ":" + alias); - PartitionPruner pruner = this.aliasToPruner.get(alias_id); + org.apache.hadoop.hive.ql.parse.ASTPartitionPruner pruner = + this.aliasToPruner.get(alias_id); if(pruner == null) { // this means that the alias is a subquery pos++; @@ -649,7 +654,7 @@ for (String alias : qb.getTabAliases()) { String alias_id = (qb.getId() == null ? 
alias : qb.getId() + ":" + alias); - PartitionPruner pruner = this.aliasToPruner.get(alias_id); + org.apache.hadoop.hive.ql.parse.ASTPartitionPruner pruner = this.aliasToPruner.get(alias_id); if (joinPartnPruner.get(alias_id) == null) { // Pass each where clause to the pruner for(String clause: qbp.getClauseNames()) { @@ -1003,7 +1008,7 @@ RowResolver inputRR = inputCtx.getRR(); Operator output = putOpInsertMap( OperatorFactory.getAndMakeChild( - new filterDesc(genExprNodeDesc(condn, inputRR)), + new filterDesc(genExprNodeDesc(condn, inputRR), false), new RowSchema(inputRR.getColumnInfos()), input), inputRR); LOG.debug("Created Filter Plan for " + qb.getId() + " row schema: " + inputRR.toString()); @@ -1014,7 +1019,7 @@ private Integer genColListRegex(String colRegex, String tabAlias, String alias, ASTNode sel, ArrayList col_list, RowResolver input, Integer pos, RowResolver output) throws SemanticException { - + // The table alias should exist if (tabAlias != null && !input.hasTableAlias(tabAlias)) throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(sel)); @@ -1045,9 +1050,13 @@ continue; } - exprNodeColumnDesc expr = new exprNodeColumnDesc(colInfo.getType(), name); + exprNodeColumnDesc expr = new exprNodeColumnDesc(colInfo.getType(), name, + colInfo.getTabAlias(), + colInfo.getIsPartitionCol()); col_list.add(expr); - output.put(tmp[0], tmp[1], new ColumnInfo(getColumnInternalName(pos), colInfo.getType())); + output.put(tmp[0], tmp[1], + new ColumnInfo(getColumnInternalName(pos), colInfo.getType(), + colInfo.getTabAlias(), colInfo.getIsPartitionCol())); pos = Integer.valueOf(pos.intValue() + 1); matched ++; } @@ -1120,7 +1129,7 @@ out_rwsch.put( qb.getParseInfo().getAlias(), outputColList.get(i), - new ColumnInfo(outputColList.get(i), TypeInfoFactory.stringTypeInfo) // Script output is always a string + new ColumnInfo(outputColList.get(i), TypeInfoFactory.stringTypeInfo, null, false) // Script output is always a string ); } @@ -1317,7 +1326,7 @@ } out_rwsch.put(tabAlias, colAlias, new ColumnInfo(getColumnInternalName(pos), - exp.getTypeInfo())); + exp.getTypeInfo(), tabAlias, false)); pos = Integer.valueOf(pos.intValue() + 1); } } @@ -1479,11 +1488,12 @@ throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr)); } - groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(), exprInfo.getInternalName())); + groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(), + exprInfo.getInternalName(), "", false)); String field = getColumnInternalName(i); outputColumnNames.add(field); groupByOutputRowResolver.put("",grpbyExpr.toStringTree(), - new ColumnInfo(field, exprInfo.getType())); + new ColumnInfo(field, exprInfo.getType(), null, false)); colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1)); } // For each aggregation @@ -1509,7 +1519,10 @@ String paraExpression = paraExprInfo.getInternalName(); assert(paraExpression != null); - aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExprInfo.getInternalName())); + aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), + paraExprInfo.getInternalName(), + paraExprInfo.getTabAlias(), + paraExprInfo.getIsPartitionCol())); } boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI; @@ -1523,7 +1536,7 @@ outputColumnNames.add(field); groupByOutputRowResolver.put("",value.toStringTree(), new ColumnInfo(field, - udaf.returnType)); + udaf.returnType, "", false)); // Save the evaluator so that it can be used by the next-stage GroupByOperators if (genericUDAFEvaluators != null) { 
genericUDAFEvaluators.put(entry.getKey(), genericUDAFEvaluator); @@ -1572,11 +1585,14 @@ throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr)); } - groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(), exprInfo.getInternalName())); + groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(), + exprInfo.getInternalName(), + exprInfo.getTabAlias(), + exprInfo.getIsPartitionCol())); String field = getColumnInternalName(i); outputColumnNames.add(field); groupByOutputRowResolver.put("",grpbyExpr.toStringTree(), - new ColumnInfo(field, exprInfo.getType())); + new ColumnInfo(field, exprInfo.getType(), "", false)); colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1)); } @@ -1606,7 +1622,10 @@ String paraExpression = paraExprInfo.getInternalName(); assert(paraExpression != null); - aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExprInfo.getInternalName())); + aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), + paraExprInfo.getInternalName(), + paraExprInfo.getTabAlias(), + paraExprInfo.getIsPartitionCol())); } } else { @@ -1617,7 +1636,9 @@ } String paraExpression = paraExprInfo.getInternalName(); assert(paraExpression != null); - aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExpression)); + aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExpression, + paraExprInfo.getTabAlias(), + paraExprInfo.getIsPartitionCol())); } boolean isDistinct = (value.getType() == HiveParser.TOK_FUNCTIONDI); Mode amode = groupByDescModeToUDAFMode(mode, isDistinct); @@ -1638,7 +1659,9 @@ (mode != groupByDesc.Mode.FINAL && isDistinct), amode)); String field = getColumnInternalName(groupByKeys.size() + aggregations.size() - 1); outputColumnNames.add(field); - groupByOutputRowResolver.put("", value.toStringTree(), new ColumnInfo(field, udaf.returnType)); + groupByOutputRowResolver.put("", value.toStringTree(), + new ColumnInfo(field, + udaf.returnType, "", false)); } Operator op = putOpInsertMap( @@ -1681,7 +1704,7 @@ String field = getColumnInternalName(i); outputColumnNames.add(field); groupByOutputRowResolver.put("",grpbyExpr.toStringTree(), - new ColumnInfo(field, grpByExprNode.getTypeInfo())); + new ColumnInfo(field, grpByExprNode.getTypeInfo(), "", false)); colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1)); } @@ -1699,7 +1722,7 @@ numDistn++; String field = getColumnInternalName(grpByExprs.size() + numDistn -1); outputColumnNames.add(field); - groupByOutputRowResolver.put("", text, new ColumnInfo(field, distExprNode.getTypeInfo())); + groupByOutputRowResolver.put("", text, new ColumnInfo(field, distExprNode.getTypeInfo(), "", false)); colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1)); } } @@ -1735,7 +1758,7 @@ outputColumnNames.add(field); groupByOutputRowResolver.put("",value.toStringTree(), new ColumnInfo(field, - udaf.returnType)); + udaf.returnType, "", false)); // Save the evaluator so that it can be used by the next-stage GroupByOperators if (genericUDAFEvaluators != null) { genericUDAFEvaluators.put(entry.getKey(), genericUDAFEvaluator); @@ -1787,7 +1810,7 @@ outputColumnNames.add(getColumnInternalName(reduceKeys.size() - 1)); String field = Utilities.ReduceField.KEY.toString() + "." 
+ getColumnInternalName(reduceKeys.size() - 1); ColumnInfo colInfo = new ColumnInfo(field, - reduceKeys.get(reduceKeys.size()-1).getTypeInfo()); + reduceKeys.get(reduceKeys.size()-1).getTypeInfo(), null, false); reduceSinkOutputRowResolver.put("", text, colInfo); colExprMap.put(colInfo.getInternalName(), inputExpr); } else { @@ -1807,7 +1830,7 @@ outputColumnNames.add(getColumnInternalName(reduceKeys.size() - 1)); String field = Utilities.ReduceField.KEY.toString() + "." + getColumnInternalName(reduceKeys.size() - 1); ColumnInfo colInfo = new ColumnInfo(field, - reduceKeys.get(reduceKeys.size()-1).getTypeInfo()); + reduceKeys.get(reduceKeys.size()-1).getTypeInfo(), null, false); reduceSinkOutputRowResolver.put("", text, colInfo); colExprMap.put(colInfo.getInternalName(), reduceKeys.get(reduceKeys.size()-1)); } @@ -1831,7 +1854,8 @@ String field = Utilities.ReduceField.VALUE.toString() + "." + getColumnInternalName(reduceValues.size() - 1); reduceSinkOutputRowResolver.put("", text, new ColumnInfo(field, - reduceValues.get(reduceValues.size()-1).getTypeInfo())); + reduceValues.get(reduceValues.size()-1).getTypeInfo(), + null, false)); } } } @@ -1844,13 +1868,14 @@ for (Map.Entry entry : aggregationTrees.entrySet()) { TypeInfo type = reduceSinkInputRowResolver.getColumnInfos().get(inputField).getType(); - reduceValues.add(new exprNodeColumnDesc(type, getColumnInternalName(inputField))); + reduceValues.add(new exprNodeColumnDesc(type, getColumnInternalName(inputField), + "", false)); inputField++; outputColumnNames.add(getColumnInternalName(reduceValues.size() - 1)); String field = Utilities.ReduceField.VALUE.toString() + "." + getColumnInternalName(reduceValues.size() - 1); reduceSinkOutputRowResolver.put("", ((ASTNode)entry.getValue()).toStringTree(), new ColumnInfo(field, - type)); + type, null, false)); } } @@ -1895,10 +1920,10 @@ String field = getColumnInternalName(i); outputColumnNames.add(field); TypeInfo typeInfo = reduceSinkInputRowResolver2.get("", grpbyExpr.toStringTree()).getType(); - exprNodeColumnDesc inputExpr = new exprNodeColumnDesc(typeInfo, field); + exprNodeColumnDesc inputExpr = new exprNodeColumnDesc(typeInfo, field, "", false); reduceKeys.add(inputExpr); ColumnInfo colInfo = new ColumnInfo(Utilities.ReduceField.KEY.toString() + "." + field, - typeInfo); + typeInfo, "", false); reduceSinkOutputRowResolver2.put("", grpbyExpr.toStringTree(), colInfo); colExprMap.put(colInfo.getInternalName(), inputExpr); @@ -1912,13 +1937,13 @@ String field = getColumnInternalName(inputField); ASTNode t = entry.getValue(); TypeInfo typeInfo = reduceSinkInputRowResolver2.get("", t.toStringTree()).getType(); - reduceValues.add(new exprNodeColumnDesc(typeInfo, field)); + reduceValues.add(new exprNodeColumnDesc(typeInfo, field, "", false)); inputField++; String col = getColumnInternalName(reduceValues.size()-1); outputColumnNames.add(col); reduceSinkOutputRowResolver2.put("", t.toStringTree(), new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." 
+ col, - typeInfo)); + typeInfo, "", false)); } ReduceSinkOperator rsOp = (ReduceSinkOperator) putOpInsertMap( @@ -1966,11 +1991,13 @@ } String expression = exprInfo.getInternalName(); - groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(), expression)); + groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(), expression, + exprInfo.getTabAlias(), + exprInfo.getIsPartitionCol())); String field = getColumnInternalName(i); outputColumnNames.add(field); groupByOutputRowResolver2.put("",grpbyExpr.toStringTree(), - new ColumnInfo(field, exprInfo.getType())); + new ColumnInfo(field, exprInfo.getType(), "", false)); colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1)); } HashMap aggregationTrees = parseInfo @@ -1985,7 +2012,9 @@ } String paraExpression = paraExprInfo.getInternalName(); assert(paraExpression != null); - aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExpression)); + aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExpression, + paraExprInfo.getTabAlias(), + paraExprInfo.getIsPartitionCol())); String aggName = value.getChild(0).getText(); @@ -2001,7 +2030,7 @@ outputColumnNames.add(field); groupByOutputRowResolver2.put("", value.toStringTree(), new ColumnInfo(field, - udaf.returnType)); + udaf.returnType, "", false)); } Operator op = putOpInsertMap( @@ -2528,7 +2557,8 @@ List fields = rowObjectInspector.getAllStructFieldRefs(); for (int i=0; i colName = new ArrayList(); for (int i=0; i colExprMap = new HashMap(); ArrayList valueCols = new ArrayList(); for(ColumnInfo colInfo: inputRR.getColumnInfos()) { - valueCols.add(new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName())); + valueCols.add(new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(), + colInfo.getTabAlias(), colInfo.getIsPartitionCol())); colExprMap.put(colInfo.getInternalName(), valueCols.get(valueCols.size() - 1)); } @@ -2756,13 +2788,15 @@ for(ColumnInfo colInfo: interim_rwsch.getColumnInfos()) { String [] info = interim_rwsch.reverseLookup(colInfo.getInternalName()); out_rwsch.put(info[0], info[1], - new ColumnInfo(getColumnInternalName(pos), colInfo.getType())); + new ColumnInfo(getColumnInternalName(pos), colInfo.getType(), info[0], false)); pos = Integer.valueOf(pos.intValue() + 1); } Operator output = putOpInsertMap( OperatorFactory.getAndMakeChild( - new extractDesc(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, Utilities.ReduceField.VALUE.toString())), + new extractDesc(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, + Utilities.ReduceField.VALUE.toString(), + "", false)), new RowSchema(out_rwsch.getColumnInfos()), interim), out_rwsch); @@ -2808,14 +2842,17 @@ { String field = fNamesIter.next(); ColumnInfo valueInfo = inputRS.get(key, field); - keyDesc.add(new exprNodeColumnDesc(valueInfo.getType(), valueInfo.getInternalName())); + keyDesc.add(new exprNodeColumnDesc(valueInfo.getType(), + valueInfo.getInternalName(), + valueInfo.getTabAlias(), + valueInfo.getIsPartitionCol())); if (outputRS.get(key, field) == null) { String colName = getColumnInternalName(outputPos); outputPos++; outputColumnNames.add(colName); colExprMap.put(colName, keyDesc.get(keyDesc.size() - 1)); outputRS.put(key, field, new ColumnInfo(colName, - valueInfo.getType())); + valueInfo.getType(), key, false)); reversedExprs.put(colName, tag); } } @@ -2866,14 +2903,17 @@ for (Map.Entry entry : fMap.entrySet()) { String field = entry.getKey(); ColumnInfo valueInfo = entry.getValue(); - exprNodeColumnDesc inputExpr = new 
exprNodeColumnDesc(valueInfo.getType(), valueInfo.getInternalName()); + exprNodeColumnDesc inputExpr = new exprNodeColumnDesc(valueInfo.getType(), + valueInfo.getInternalName(), + valueInfo.getTabAlias(), + valueInfo.getIsPartitionCol()); reduceValues.add(inputExpr); if (outputRS.get(src, field) == null) { String col = getColumnInternalName(reduceValues.size() - 1); outputColumns.add(col); ColumnInfo newColInfo = new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + col, - valueInfo.getType()); + valueInfo.getType(), src, false); colExprMap.put(newColInfo.getInternalName(), inputExpr); outputRS.put(src, field, newColInfo); } @@ -3306,7 +3346,8 @@ ArrayList columnNames = new ArrayList(); for (int i = 0; i < columns.size(); i++) { ColumnInfo col = columns.get(i); - colList.add(new exprNodeColumnDesc(col.getType(), col.getInternalName())); + colList.add(new exprNodeColumnDesc(col.getType(), col.getInternalName(), + col.getTabAlias(), col.getIsPartitionCol())); columnNames.add(col.getInternalName()); } Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild( @@ -3405,7 +3446,7 @@ outputColumnNames.add(getColumnInternalName(reduceKeys.size() - 1)); String field = Utilities.ReduceField.KEY.toString() + "." + getColumnInternalName(reduceKeys.size() - 1); ColumnInfo colInfo = new ColumnInfo(field, - reduceKeys.get(reduceKeys.size()-1).getTypeInfo()); + reduceKeys.get(reduceKeys.size()-1).getTypeInfo(), "", false); reduceSinkOutputRowResolver.put("", text, colInfo); colExprMap.put(colInfo.getInternalName(), distExpr); } @@ -3423,7 +3464,7 @@ exprNodeDesc grpByExprNode = genExprNodeDesc(grpbyExpr, inputRR); reduceValues.add(grpByExprNode); String field = Utilities.ReduceField.VALUE.toString() + "." + getColumnInternalName(reduceValues.size() - 1); - ColumnInfo colInfo = new ColumnInfo(field, reduceValues.get(reduceValues.size()-1).getTypeInfo()); + ColumnInfo colInfo = new ColumnInfo(field, reduceValues.get(reduceValues.size()-1).getTypeInfo(), "", false); reduceSinkOutputRowResolver.put("", text, colInfo); outputColumnNames.add(getColumnInternalName(reduceValues.size() - 1)); } @@ -3446,7 +3487,7 @@ exprNodeDesc paraExprNode = genExprNodeDesc(paraExpr, inputRR); reduceValues.add(paraExprNode); String field = Utilities.ReduceField.VALUE.toString() + "." 
+ getColumnInternalName(reduceValues.size() - 1); - ColumnInfo colInfo = new ColumnInfo(field, reduceValues.get(reduceValues.size()-1).getTypeInfo()); + ColumnInfo colInfo = new ColumnInfo(field, reduceValues.get(reduceValues.size()-1).getTypeInfo(), "", false); reduceSinkOutputRowResolver.put("", text, colInfo); outputColumnNames.add(getColumnInternalName(reduceValues.size() - 1)); } @@ -3727,7 +3768,8 @@ for (String col : bucketCols) { ColumnInfo ci = rwsch.get(alias, col); // TODO: change type to the one in the table schema - args.add(new exprNodeColumnDesc(ci.getType(), ci.getInternalName())); + args.add(new exprNodeColumnDesc(ci.getType(), ci.getInternalName(), + ci.getTabAlias(), ci.getIsPartitionCol())); } } else { @@ -3777,7 +3819,8 @@ for (int i=0; i> iterP = aliasToPruner.entrySet().iterator(); - PartitionPruner pr = ((Map.Entry)iterP.next()).getValue(); + Iterator> iterP = + aliasToPruner.entrySet().iterator(); + org.apache.hadoop.hive.ql.parse.ASTPartitionPruner pr = + ((Map.Entry)iterP.next()).getValue(); if (pr.onlyContainsPartitionCols()) { List listP = new ArrayList(); List partP = new ArrayList(); - PartitionPruner.PrunedPartitionList partsList = null; + PrunedPartitionList partsList = null; Set parts = null; try { partsList = pr.prune(); @@ -4204,9 +4253,9 @@ genPlan(qb); - ParseContext pCtx = new ParseContext(conf, qb, ast, aliasToPruner, aliasToSamplePruner, topOps, - topSelOps, opParseCtx, joinContext, loadTableWork, loadFileWork, - ctx, idToTableNameMap, destTableId, uCtx, listMapJoinOpsNoReducer); + ParseContext pCtx = new ParseContext(conf, qb, ast, aliasToPruner, opToPartPruner, aliasToSamplePruner, topOps, + topSelOps, opParseCtx, joinContext, topToTable, loadTableWork, loadFileWork, + ctx, idToTableNameMap, destTableId, uCtx, listMapJoinOpsNoReducer); Optimizer optm = new Optimizer(); optm.setPctx(pCtx); @@ -4218,7 +4267,7 @@ // Do any partition pruning genPartitionPruners(qb); LOG.info("Completed partition pruning"); - + // Do any sample pruning genSamplePruners(qb); LOG.info("Completed sample pruning"); @@ -4252,7 +4301,8 @@ // If the current subExpression is pre-calculated, as in Group-By etc. ColumnInfo colInfo = input.get("", expr.toStringTree()); if (colInfo != null) { - return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName()); + return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(), + colInfo.getTabAlias(), colInfo.getIsPartitionCol()); } // Create the walker, the rules dispatcher and the context. 
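(Editor's note, not part of the patch.) The SemanticAnalyzer hunks above repeat one mechanical change: every place that builds an exprNodeColumnDesc or a ColumnInfo now also passes the originating table alias and an is-partition-column flag, so later passes (in particular the new partition pruning keyed by table scan operator) can tell whether an expression touches only partition columns. A minimal sketch of that call-site pattern follows; the class and method names (ColumnExprSketch, toColumnExpr, reemitColumn) are hypothetical, while the four-argument constructors and the getTabAlias()/getIsPartitionCol() accessors are exactly the ones used in the hunks above.

import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;

public class ColumnExprSketch {
  // Build a column expression that carries the source table alias and the
  // partition-column flag forward from the resolved ColumnInfo.
  public static exprNodeColumnDesc toColumnExpr(ColumnInfo colInfo) {
    return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(),
        colInfo.getTabAlias(), colInfo.getIsPartitionCol());
  }

  // Re-emit the same column under a new internal name for an output row
  // resolver, again preserving the table alias and partition-column flag.
  public static ColumnInfo reemitColumn(String newInternalName, ColumnInfo colInfo) {
    return new ColumnInfo(newInternalName, colInfo.getType(),
        colInfo.getTabAlias(), colInfo.getIsPartitionCol());
  }
}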
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (revision 801363) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (working copy) @@ -27,10 +27,13 @@ import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.TableScanOperator; +import org.apache.hadoop.hive.ql.plan.exprNodeDesc; import org.apache.hadoop.hive.ql.plan.loadFileDesc; import org.apache.hadoop.hive.ql.plan.loadTableDesc; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext; /** @@ -46,12 +49,14 @@ public class ParseContext { private QB qb; private ASTNode ast; - private HashMap aliasToPruner; + private HashMap aliasToPruner; + private HashMap opToPartPruner; private HashMap aliasToSamplePruner; private HashMap> topOps; private HashMap> topSelOps; private LinkedHashMap, OpParseContext> opParseCtx; private Map joinContext; + private HashMap topToTable; private List loadTableWork; private List loadFileWork; private Context ctx; @@ -60,6 +65,12 @@ private int destTableId; private UnionProcContext uCtx; private List listMapJoinOpsNoReducer; // list of map join operators with no reducer + + // set to true if the filter expression references any column other than the partitioning columns. + // This is used to optimize the "select * from table where ..." scenario: when the where condition references + // only partitioning columns, the matching partitions are identified and streamed directly to the client + // without requiring a map-reduce job + private boolean hasNonPartCols; public ParseContext() { } @@ -71,6 +82,8 @@ * current parse tree * @param aliasToPruner * partition pruner list + * @param opToPartPruner + * map from table scan operator to partition pruner * @param aliasToSamplePruner * sample pruner list * @param loadFileWork @@ -85,15 +98,19 @@ * @param topSelOps * list of operators for the selects introduced for column pruning * @param listMapJoinOpsNoReducer - * list of map join operators with no reducer + * list of map join operators with no reducer + * @param hasNonPartCols + * whether the query references columns other than partitioning columns */ public ParseContext(HiveConf conf, QB qb, ASTNode ast, - HashMap aliasToPruner, + HashMap aliasToPruner, + HashMap opToPartPruner, HashMap aliasToSamplePruner, HashMap> topOps, HashMap> topSelOps, LinkedHashMap, OpParseContext> opParseCtx, Map joinContext, + HashMap topToTable, List loadTableWork, List loadFileWork, Context ctx, HashMap idToTableNameMap, int destTableId, UnionProcContext uCtx, List listMapJoinOpsNoReducer) { @@ -101,8 +118,10 @@ this.qb = qb; this.ast = ast; this.aliasToPruner = aliasToPruner; + this.opToPartPruner = opToPartPruner; this.aliasToSamplePruner = aliasToSamplePruner; this.joinContext = joinContext; + this.topToTable = topToTable; this.loadFileWork = loadFileWork; this.loadTableWork = loadTableWork; this.opParseCtx = opParseCtx; @@ -113,6 +132,7 @@ this.destTableId = destTableId; this.uCtx = uCtx; this.listMapJoinOpsNoReducer = listMapJoinOpsNoReducer; + this.hasNonPartCols = false; } /** @@ -178,7 +198,7 @@ /** * @return the aliasToPruner */ - public HashMap getAliasToPruner() { + public HashMap getAliasToPruner() { return
aliasToPruner; } @@ -186,11 +206,40 @@ * @param aliasToPruner * the aliasToPruner to set */ - public void setAliasToPruner(HashMap aliasToPruner) { + public void setAliasToPruner(HashMap aliasToPruner) { this.aliasToPruner = aliasToPruner; } /** + * @return the opToPartPruner + */ + public HashMap getOpToPartPruner() { + return opToPartPruner; + } + + /** + * @param opToPartPruner + * the opToPartPruner to set + */ + public void setOpToPartPruner(HashMap opToPartPruner) { + this.opToPartPruner = opToPartPruner; + } + + /** + * @return the topToTable + */ + public HashMap getTopToTable() { + return topToTable; + } + + /** + * @param topToTable + * the topToTable to set + */ + public void setTopToTable(HashMap topToTable) { + this.topToTable = topToTable; + } + /** * @return the aliasToSamplePruner */ public HashMap getAliasToSamplePruner() { @@ -335,4 +384,19 @@ List listMapJoinOpsNoReducer) { this.listMapJoinOpsNoReducer = listMapJoinOpsNoReducer; } + + /** + * Sets the hasNonPartCols flag + * @param val + */ + public void setHasNonPartCols(boolean val) { + this.hasNonPartCols = val; + } + + /** + * Gets the value of the hasNonPartCols flag + */ + public boolean getHasNonPartCols() { + return this.hasNonPartCols; + } } Index: ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java (revision 0) @@ -0,0 +1,87 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.parse; + +import java.util.Set; + +import org.apache.hadoop.hive.ql.metadata.Partition; + +/** + * The list of pruned partitions. 
+ */ +public class PrunedPartitionList { + // confirmed partitions - satisfy the partition criteria + private Set confirmedPartns; + + // unknown partitions - may/may not satisfy the partition criteria + private Set unknownPartns; + + // denied partitions - do not satisfy the partition criteria + private Set deniedPartns; + + /** + * @param confirmedPartns confirmed partitions + * @param unknownPartns unknown partitions + */ + public PrunedPartitionList(Set confirmedPartns, Set unknownPartns, Set deniedPartns) { + this.confirmedPartns = confirmedPartns; + this.unknownPartns = unknownPartns; + this.deniedPartns = deniedPartns; + } + + /** + * get confirmed partitions + * @return confirmedPartns confirmed partitions + */ + public Set getConfirmedPartns() { + return confirmedPartns; + } + + /** + * get unknown partitions + * @return unknownPartns unknown partitions + */ + public Set getUnknownPartns() { + return unknownPartns; + } + + /** + * get denied partitions + * @return deniedPartns denied partitions + */ + public Set getDeniedPartns() { + return deniedPartns; + } + + /** + * set confirmed partitions + * @param confirmedPartns confirmed partitions + */ + public void setConfirmedPartns(Set confirmedPartns) { + this.confirmedPartns = confirmedPartns; + } + + /** + * set unknown partitions + * @param unknownPartns unknown partitions + */ + public void setUnknownPartns(Set unknownPartns) { + this.unknownPartns = unknownPartns; + } +}
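(Editor's note, not part of the patch.) PrunedPartitionList splits the pruner's result three ways: partitions known to satisfy the partition predicate, partitions that may satisfy it, and partitions known not to. The apparent intent is that only the denied set can be dropped outright, the confirmed set needs no further partition filtering, and the unknown set must still be filtered at execution time. A minimal usage sketch under those assumptions follows; the class and method names (PrunedPartitionListSketch, collectScannedPartitions) are hypothetical, while the getters are the ones defined in the new class above.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;

public class PrunedPartitionListSketch {
  // Partitions that have to be scanned: confirmed matches plus the unknown
  // ones, which may still satisfy the predicate and are filtered at run time.
  public static List<Partition> collectScannedPartitions(PrunedPartitionList partsList) {
    List<Partition> toScan = new ArrayList<Partition>();
    toScan.addAll(partsList.getConfirmedPartns());
    toScan.addAll(partsList.getUnknownPartns());
    // partsList.getDeniedPartns() is known not to match and is skipped entirely.
    return toScan;
  }
}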