Index: conf/hive-default.xml
===================================================================
--- conf/hive-default.xml (revision 1158048)
+++ conf/hive-default.xml (working copy)
@@ -361,6 +361,12 @@
+<property>
+  <name>hive.ppd.recognizetransivity</name>
+  <value>true</value>
+  <description>Whether to transitively replicate predicate filters over equijoin conditions.</description>
+</property>
+
 <property>
   <name>hive.optimize.pruner</name>
   <value>true</value>
   <description>Whether to enable the new partition pruner which depends on predicate pushdown. If this is disabled,
@@ -533,12 +539,6 @@
-<property>
-  <name>hive.exec.failure.hooks</name>
-  <value></value>
-  <description>Comma-separated list of on-failure hooks to be invoked for each statement. An on-failure hook is specified as the name of Java class which implements the org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext interface.</description>
-</property>
-
 <property>
   <name>hive.merge.mapfiles</name>
   <value>true</value>
   <description>Merge small files at the end of a map-only job</description>
@@ -936,7 +936,7 @@
   <name>hive.exec.show.job.failure.debug.info</name>
-  <value>true</value>
+  <value>false</value>
   <description>If a job fails, whether to provide a link in the CLI to the task with the
     most failures, along with debugging hints if applicable.
@@ -1144,10 +1144,4 @@
by record readers
-
-<property>
-  <name>hive.exec.perf.logger</name>
-  <value>org.apache.hadoop.hive.ql.log.PerfLogger</value>
-  <description>The class responsible logging client side performance metrics. Must be a subclass of org.apache.hadoop.hive.ql.log.PerfLogger</description>
-</property>
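
Usage sketch (illustrative only, not part of the patch; assumes Hive's standard session-level SET mechanism): the new flag can be toggled per session when comparing plans with and without transitive predicate replication.

    SET hive.ppd.recognizetransivity=false;  -- disable transitive replication of join predicates
    SET hive.ppd.recognizetransivity=true;   -- patch default: replicate filters across equijoins
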
Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
===================================================================
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1158048)
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy)
@@ -371,6 +371,7 @@
HIVEOPTCP("hive.optimize.cp", true), // column pruner
HIVEOPTINDEXFILTER("hive.optimize.index.filter", false), // automatically use indexes
HIVEOPTPPD("hive.optimize.ppd", true), // predicate pushdown
+ HIVEPPDRECOGNIZETRANSITIVITY("hive.ppd.recognizetransivity", true), // transitive replication of predicates over equijoins
HIVEPPDREMOVEDUPLICATEFILTERS("hive.ppd.remove.duplicatefilters", true),
// push predicates down to storage handlers
HIVEOPTPPD_STORAGE("hive.optimize.ppd.storage", true),
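
For illustration, the kind of query affected (taken from the cluster/ppd_clusterby tests whose golden files are updated below): with transitivity recognized, the filter on x.key is also applied to alias y through the equijoin x.key = y.key, which is why a new Filter Operator appears under y's TableScan in the plans that follow.

    EXPLAIN
    SELECT x.key, x.value as v1, y.key
    FROM SRC x JOIN SRC y ON (x.key = y.key)
    WHERE x.key = 20
    CLUSTER BY v1;
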
Index: ql/src/test/results/clientpositive/ppd_clusterby.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_clusterby.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/ppd_clusterby.q.out (working copy)
@@ -64,11 +64,11 @@
PREHOOK: query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-56-22_369_8709666241977580995/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-36-34_074_7000535872788278693/-mr-10000
POSTHOOK: query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-56-22_369_8709666241977580995/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-36-34_074_7000535872788278693/-mr-10000
10 val_10
PREHOOK: query: EXPLAIN
SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
@@ -112,18 +112,22 @@
y
TableScan
alias: y
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
+ Filter Operator
+ predicate:
+ expr: (key = 20)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -156,7 +160,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/sdong/hive_2011-02-10_16-56-25_857_6745085956410995305/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_15-36-37_183_5233818336294938355/-mr-10002
Reduce Output Operator
key expressions:
expr: _col1
@@ -190,11 +194,11 @@
PREHOOK: query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-56-25_950_4577225496126879083/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-36-37_259_5028248286950507899/-mr-10000
POSTHOOK: query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-06-51_471_1696629323983265212/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-36-37_259_5028248286950507899/-mr-10000
20 val_20 20
PREHOOK: query: EXPLAIN
SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
@@ -258,11 +262,11 @@
PREHOOK: query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-07-07_755_9086280220307924310/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-36-43_271_3465064583325875867/-mr-10000
POSTHOOK: query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-07-07_755_9086280220307924310/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-36-43_271_3465064583325875867/-mr-10000
10 val_10
PREHOOK: query: EXPLAIN
SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
@@ -306,18 +310,22 @@
y
TableScan
alias: y
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
+ Filter Operator
+ predicate:
+ expr: (key = 20)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -346,7 +354,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-07-16_261_4934200005006221322/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_15-36-46_356_4821991475262302737/-mr-10002
Reduce Output Operator
key expressions:
expr: _col1
@@ -380,9 +388,9 @@
PREHOOK: query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-07-16_525_2344085515974635436/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-36-46_432_7875891648142135101/-mr-10000
POSTHOOK: query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-07-16_525_2344085515974635436/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-36-46_432_7875891648142135101/-mr-10000
20 val_20 20
Index: ql/src/test/results/clientpositive/ppd_outer_join2.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_outer_join2.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/ppd_outer_join2.q.out (working copy)
@@ -30,20 +30,24 @@
a
TableScan
alias: a
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 0
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Filter Operator
+ predicate:
+ expr: ((key > '15') and (key < '25'))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 0
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
b
TableScan
alias: b
@@ -110,7 +114,7 @@
WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25'
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-32-34_464_515670423172601583/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-12_487_4675144577619468840/-mr-10000
POSTHOOK: query: FROM
src a
RIGHT OUTER JOIN
@@ -120,7 +124,7 @@
WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-32-34_464_515670423172601583/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-12_487_4675144577619468840/-mr-10000
150 val_150 150 val_150
152 val_152 152 val_152
152 val_152 152 val_152
@@ -279,20 +283,24 @@
a
TableScan
alias: a
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 0
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Filter Operator
+ predicate:
+ expr: ((key > '15') and (key < '25'))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 0
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
b
TableScan
alias: b
@@ -359,7 +367,7 @@
WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25'
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-32-44_509_6575656481631571931/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-15_904_8893399901344877562/-mr-10000
POSTHOOK: query: FROM
src a
RIGHT OUTER JOIN
@@ -369,7 +377,7 @@
WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-32-44_509_6575656481631571931/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-15_904_8893399901344877562/-mr-10000
150 val_150 150 val_150
152 val_152 152 val_152
152 val_152 152 val_152
Index: ql/src/test/results/clientpositive/join38.q.out
===================================================================
--- ql/src/test/results/clientpositive/join38.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/join38.q.out (working copy)
@@ -26,11 +26,11 @@
PREHOOK: query: select * from tmp
PREHOOK: type: QUERY
PREHOOK: Input: default@tmp
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-07-01_449_2704240234253573656/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-53-23_764_3301298329074450240/-mr-10000
POSTHOOK: query: select * from tmp
POSTHOOK: type: QUERY
POSTHOOK: Input: default@tmp
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-07-01_449_2704240234253573656/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-53-23_764_3301298329074450240/-mr-10000
POSTHOOK: Lineage: tmp.col0 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: tmp.col1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: tmp.col10 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -89,15 +89,19 @@
a
TableScan
alias: a
- HashTable Sink Operator
- condition expressions:
- 0 {value}
- 1 {col5} {col11}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[col11]]
- Position of Big Table: 1
+ Filter Operator
+ predicate:
+ expr: (key = 111)
+ type: boolean
+ HashTable Sink Operator
+ condition expressions:
+ 0 {value}
+ 1 {col5} {col11}
+ handleSkewJoin: false
+ keys:
+ 0 [Column[key]]
+ 1 [Column[col11]]
+ Position of Big Table: 1
Stage: Stage-1
Map Reduce
@@ -133,7 +137,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/sdong/hive_2011-02-10_16-07-01_714_3920059174195065578/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_14-53-23_963_3349528858179007798/-mr-10002
Select Operator
expressions:
expr: _col1
@@ -217,7 +221,7 @@
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Input: default@tmp
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-07-01_819_8577478512687826786/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-53-24_044_6878564084853277625/-mr-10000
POSTHOOK: query: FROM src a JOIN tmp b ON (a.key = b.col11)
SELECT /*+ MAPJOIN(a) */ a.value, b.col5, count(1) as count
where b.col11 = 111
@@ -225,7 +229,7 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Input: default@tmp
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-07-01_819_8577478512687826786/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-53-24_044_6878564084853277625/-mr-10000
POSTHOOK: Lineage: tmp.col0 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: tmp.col1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: tmp.col10 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/cluster.q.out
===================================================================
--- ql/src/test/results/clientpositive/cluster.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/cluster.q.out (working copy)
@@ -60,11 +60,11 @@
PREHOOK: query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-00_308_3996892596871050805/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-05-40_010_3384539938958696218/-mr-10000
POSTHOOK: query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-00_308_3996892596871050805/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-05-40_010_3384539938958696218/-mr-10000
10 val_10
PREHOOK: query: EXPLAIN
SELECT * FROM SRC x where x.key = 20 CLUSTER BY key
@@ -128,11 +128,11 @@
PREHOOK: query: SELECT * FROM SRC x where x.key = 20 CLUSTER BY key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-05_095_3989764442537175585/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-05-43_118_4008555077620962492/-mr-10000
POSTHOOK: query: SELECT * FROM SRC x where x.key = 20 CLUSTER BY key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-05_095_3989764442537175585/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-05-43_118_4008555077620962492/-mr-10000
20 val_20
PREHOOK: query: EXPLAIN
SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
@@ -196,11 +196,11 @@
PREHOOK: query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-08_623_1909163536697384354/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-05-46_242_6968529164569055456/-mr-10000
POSTHOOK: query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-08_623_1909163536697384354/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-05-46_242_6968529164569055456/-mr-10000
20 val_20
PREHOOK: query: EXPLAIN
SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY x.key
@@ -264,11 +264,11 @@
PREHOOK: query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY x.key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-12_155_5678606790020408085/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-05-49_297_6381899498968859135/-mr-10000
POSTHOOK: query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY x.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-12_155_5678606790020408085/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-05-49_297_6381899498968859135/-mr-10000
20 val_20
PREHOOK: query: EXPLAIN
SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
@@ -332,11 +332,11 @@
PREHOOK: query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-15_632_3313925045921329153/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-05-52_360_3921002830592058578/-mr-10000
POSTHOOK: query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-15_632_3313925045921329153/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-05-52_360_3921002830592058578/-mr-10000
20 val_20
PREHOOK: query: EXPLAIN
SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
@@ -400,11 +400,11 @@
PREHOOK: query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-19_114_1622685930107208593/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-05-55_413_1561312347468537043/-mr-10000
POSTHOOK: query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-19_114_1622685930107208593/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-05-55_413_1561312347468537043/-mr-10000
20 val_20
PREHOOK: query: EXPLAIN
SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY v1
@@ -468,11 +468,11 @@
PREHOOK: query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY v1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-22_595_5329567143269229226/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-05-58_483_4193468965645427950/-mr-10000
POSTHOOK: query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY v1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-22_595_5329567143269229226/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-05-58_483_4193468965645427950/-mr-10000
20 val_20
PREHOOK: query: EXPLAIN
SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
@@ -543,11 +543,11 @@
PREHOOK: query: SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-26_137_8371385798213040613/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-06-01_536_2296687090295261658/-mr-10000
POSTHOOK: query: SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-26_137_8371385798213040613/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-06-01_536_2296687090295261658/-mr-10000
20 val_20
PREHOOK: query: EXPLAIN
SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
@@ -591,18 +591,22 @@
y
TableScan
alias: y
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
+ Filter Operator
+ predicate:
+ expr: (key = 20)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -631,7 +635,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/sdong/hive_2011-02-10_01-39-29_546_9044307911488476013/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_14-06-04_609_1013539649990230513/-mr-10002
Reduce Output Operator
key expressions:
expr: _col1
@@ -665,11 +669,11 @@
PREHOOK: query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-29_640_7990093747029711700/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-06-04_683_8881086943921866376/-mr-10000
POSTHOOK: query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-29_640_7990093747029711700/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-06-04_683_8881086943921866376/-mr-10000
20 val_20 20
PREHOOK: query: EXPLAIN
SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
@@ -713,20 +717,24 @@
y
TableScan
alias: y
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Filter Operator
+ predicate:
+ expr: (key = 20)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -757,7 +765,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/sdong/hive_2011-02-10_01-39-36_146_7523047334744872007/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_14-06-10_540_387097519944809883/-mr-10002
Reduce Output Operator
key expressions:
expr: _col1
@@ -793,11 +801,11 @@
PREHOOK: query: SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-36_271_8501113562060498197/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-06-10_620_508384745366093039/-mr-10000
POSTHOOK: query: SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-36_271_8501113562060498197/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-06-10_620_508384745366093039/-mr-10000
20 val_20 20 val_20
PREHOOK: query: EXPLAIN
SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
@@ -841,20 +849,24 @@
y
TableScan
alias: y
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Filter Operator
+ predicate:
+ expr: (key = 20)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -885,7 +897,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/sdong/hive_2011-02-10_01-39-42_907_3531906775052670778/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_14-06-16_477_4713776691114383621/-mr-10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -921,11 +933,11 @@
PREHOOK: query: SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-43_027_3013451718430380875/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-06-16_550_1693241149285515315/-mr-10000
POSTHOOK: query: SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-43_027_3013451718430380875/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-06-16_550_1693241149285515315/-mr-10000
20 val_20 20 val_20
PREHOOK: query: EXPLAIN
SELECT x.key, x.value as v1, y.key as yk FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
@@ -969,18 +981,22 @@
y
TableScan
alias: y
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
+ Filter Operator
+ predicate:
+ expr: (key = 20)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -1009,7 +1025,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/sdong/hive_2011-02-10_01-39-49_649_313028914611607523/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_14-06-22_495_1958918890600969598/-mr-10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -1043,11 +1059,11 @@
PREHOOK: query: SELECT x.key, x.value as v1, y.key as yk FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-49_743_5449040629326573495/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-06-22_565_5589926363417940722/-mr-10000
POSTHOOK: query: SELECT x.key, x.value as v1, y.key as yk FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-49_743_5449040629326573495/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-06-22_565_5589926363417940722/-mr-10000
20 val_20 20
PREHOOK: query: EXPLAIN
SELECT unioninput.*
@@ -1173,7 +1189,7 @@
CLUSTER BY unioninput.key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-56_398_2632059281772742928/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-06-28_484_6154449119111735745/-mr-10000
POSTHOOK: query: SELECT unioninput.*
FROM (
FROM src select src.key, src.value WHERE src.key < 100
@@ -1183,7 +1199,7 @@
CLUSTER BY unioninput.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-39-56_398_2632059281772742928/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-06-28_484_6154449119111735745/-mr-10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/router_join_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/router_join_ppr.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/router_join_ppr.q.out (working copy)
@@ -31,20 +31,25 @@
TableScan
alias: a
GatherStats: false
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 0
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Filter Operator
+ isSamplingPred: false
+ predicate:
+ expr: ((key > 15) and (key < 25))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 0
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
b
TableScan
alias: b
@@ -72,13 +77,13 @@
type: string
Needs Tagging: true
Path -> Alias:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [a]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [b]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [b]
Path -> Partition:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
Partition
base file name: src
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -89,12 +94,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -105,16 +110,16 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
name: default.src
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -128,13 +133,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490002
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -145,17 +150,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -169,13 +174,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490003
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -186,17 +191,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -210,13 +215,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490003
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -227,17 +232,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -251,13 +256,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490004
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -268,13 +273,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
@@ -309,9 +314,9 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/tmp/sdong/hive_2011-02-10_17-01-51_944_8007180207485477580/-ext-10001
+ directory: file:/tmp/charleschen/hive_2011-08-04_15-49-45_315_1221990596724204746/-ext-10001
NumFilesPerFileSink: 1
- Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-01-51_944_8007180207485477580/-ext-10001/
+ Stats Publishing Key Prefix: file:/tmp/charleschen/hive_2011-08-04_15-49-45_315_1221990596724204746/-ext-10001/
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -341,7 +346,7 @@
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-01-52_113_3892314008195573376/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-49-45_446_6864227253125426882/-mr-10000
POSTHOOK: query: FROM
src a
RIGHT OUTER JOIN
@@ -355,7 +360,7 @@
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-01-52_113_3892314008195573376/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-49-45_446_6864227253125426882/-mr-10000
17 val_17 17 val_17
17 val_17 17 val_17
18 val_18 18 val_18
@@ -401,20 +406,25 @@
TableScan
alias: a
GatherStats: false
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 0
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Filter Operator
+ isSamplingPred: false
+ predicate:
+ expr: ((key > 15) and (key < 25))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 0
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
b
TableScan
alias: b
@@ -440,11 +450,11 @@
type: string
Needs Tagging: true
Path -> Alias:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [b]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
Path -> Partition:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
Partition
base file name: src
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -455,12 +465,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -471,16 +481,16 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
name: default.src
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -494,13 +504,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490002
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -511,17 +521,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -535,13 +545,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490003
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -552,13 +562,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
@@ -590,9 +600,9 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/tmp/sdong/hive_2011-02-10_17-01-56_698_3650606708384403170/-ext-10001
+ directory: file:/tmp/charleschen/hive_2011-08-04_15-49-49_414_6366150176434092424/-ext-10001
NumFilesPerFileSink: 1
- Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-01-56_698_3650606708384403170/-ext-10001/
+ Stats Publishing Key Prefix: file:/tmp/charleschen/hive_2011-08-04_15-49-49_414_6366150176434092424/-ext-10001/
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -620,7 +630,7 @@
PREHOOK: Input: default@src
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-01-56_851_1955742945505151056/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-49-49_530_837734850983794566/-mr-10000
POSTHOOK: query: FROM
srcpart a
RIGHT OUTER JOIN
@@ -632,7 +642,7 @@
POSTHOOK: Input: default@src
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-01-56_851_1955742945505151056/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-49-49_530_837734850983794566/-mr-10000
17 val_17 17 val_17
17 val_17 17 val_17
18 val_18 18 val_18
@@ -678,20 +688,25 @@
TableScan
alias: a
GatherStats: false
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 0
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Filter Operator
+ isSamplingPred: false
+ predicate:
+ expr: ((key > 15) and (key < 25))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 0
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
b
TableScan
alias: b
@@ -719,11 +734,11 @@
type: string
Needs Tagging: true
Path -> Alias:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [a]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b]
Path -> Partition:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
Partition
base file name: src
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -734,12 +749,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -750,16 +765,16 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
name: default.src
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -773,13 +788,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490002
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -790,17 +805,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -814,13 +829,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490003
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -831,13 +846,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
@@ -869,9 +884,9 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/tmp/sdong/hive_2011-02-10_17-02-00_948_2573378846116151448/-ext-10001
+ directory: file:/tmp/charleschen/hive_2011-08-04_15-49-53_225_5814759782076240610/-ext-10001
NumFilesPerFileSink: 1
- Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-02-00_948_2573378846116151448/-ext-10001/
+ Stats Publishing Key Prefix: file:/tmp/charleschen/hive_2011-08-04_15-49-53_225_5814759782076240610/-ext-10001/
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -899,7 +914,7 @@
PREHOOK: Input: default@src
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-02-01_089_7899465838633645287/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-49-53_356_3808971112123446142/-mr-10000
POSTHOOK: query: FROM
src a
RIGHT OUTER JOIN
@@ -911,7 +926,7 @@
POSTHOOK: Input: default@src
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-02-01_089_7899465838633645287/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-49-53_356_3808971112123446142/-mr-10000
17 val_17 17 val_17
17 val_17 17 val_17
18 val_18 18 val_18
@@ -957,22 +972,27 @@
TableScan
alias: a
GatherStats: false
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 0
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
- expr: ds
- type: string
+ Filter Operator
+ isSamplingPred: false
+ predicate:
+ expr: ((key > 15) and (key < 25))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 0
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ expr: ds
+ type: string
b
TableScan
alias: b
@@ -998,13 +1018,13 @@
type: string
Needs Tagging: true
Path -> Alias:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [b]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [a]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [a]
Path -> Partition:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
Partition
base file name: src
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1015,12 +1035,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1031,16 +1051,16 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
name: default.src
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1054,13 +1074,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490002
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1071,17 +1091,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1095,13 +1115,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490003
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1112,17 +1132,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1136,13 +1156,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490003
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1153,17 +1173,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1177,13 +1197,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490004
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1194,13 +1214,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
@@ -1232,9 +1252,9 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/tmp/sdong/hive_2011-02-10_17-02-05_196_2971739775305015185/-ext-10001
+ directory: file:/tmp/charleschen/hive_2011-08-04_15-49-57_047_5178153527613849897/-ext-10001
NumFilesPerFileSink: 1
- Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-02-05_196_2971739775305015185/-ext-10001/
+ Stats Publishing Key Prefix: file:/tmp/charleschen/hive_2011-08-04_15-49-57_047_5178153527613849897/-ext-10001/
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1264,7 +1284,7 @@
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-02-05_351_3959616875574140681/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-49-57_168_4844713070381327545/-mr-10000
POSTHOOK: query: FROM
srcpart a
RIGHT OUTER JOIN
@@ -1278,7 +1298,7 @@
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-02-05_351_3959616875574140681/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-49-57_168_4844713070381327545/-mr-10000
17 val_17 17 val_17
17 val_17 17 val_17
18 val_18 18 val_18
Index: ql/src/test/results/clientpositive/join16.q.out
===================================================================
--- ql/src/test/results/clientpositive/join16.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/join16.q.out (working copy)
@@ -18,7 +18,7 @@
alias: a
Filter Operator
predicate:
- expr: ((key > 10) and (key > 20))
+ expr: (((key > 10) and (key > 20)) and (value < 200))
type: boolean
Select Operator
expressions:
Index: ql/src/test/results/clientpositive/ppd_transitivity.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_transitivity.q.out (revision 0)
+++ ql/src/test/results/clientpositive/ppd_transitivity.q.out (revision 0)
@@ -0,0 +1,395 @@
+PREHOOK: query: drop table invites
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table invites
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table invites2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table invites2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table invites (foo int, bar string) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table invites (foo int, bar string) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@invites
+PREHOOK: query: create table invites2 (foo int, bar string) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table invites2 (foo int, bar string) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@invites2
+PREHOOK: query: explain select count(*) from invites join invites2 on invites.ds=invites2.ds where invites.ds='2011-01-01'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select count(*) from invites join invites2 on invites.ds=invites2.ds where invites.ds='2011-01-01'
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME invites)) (TOK_TABREF (TOK_TABNAME invites2)) (= (. (TOK_TABLE_OR_COL invites) ds) (. (TOK_TABLE_OR_COL invites2) ds)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTIONSTAR count))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL invites) ds) '2011-01-01'))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-2 depends on stages: Stage-1
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ invites
+ TableScan
+ alias: invites
+ Filter Operator
+ predicate:
+ expr: (ds = '2011-01-01')
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: ds
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: ds
+ type: string
+ tag: 0
+ value expressions:
+ expr: ds
+ type: string
+ invites2
+ TableScan
+ alias: invites2
+ Filter Operator
+ predicate:
+ expr: (ds = '2011-01-01')
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: ds
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: ds
+ type: string
+ tag: 1
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0 {VALUE._col2}
+ 1
+ handleSkewJoin: false
+ outputColumnNames: _col2
+ Select Operator
+ Group By Operator
+ aggregations:
+ expr: count()
+ bucketGroup: false
+ mode: hash
+ outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+ Stage: Stage-2
+ Map Reduce
+ Alias -> Map Operator Tree:
+ file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-17_18-26-34_065_6364872548047301613/-mr-10002
+ Reduce Output Operator
+ sort order:
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: bigint
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations:
+ expr: count(VALUE._col0)
+ bucketGroup: false
+ mode: mergepartial
+ outputColumnNames: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: bigint
+ outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: explain select * from invites join invites2 on invites.ds=invites2.ds where invites.ds='2011-01-01'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select * from invites join invites2 on invites.ds=invites2.ds where invites.ds='2011-01-01'
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME invites)) (TOK_TABREF (TOK_TABNAME invites2)) (= (. (TOK_TABLE_OR_COL invites) ds) (. (TOK_TABLE_OR_COL invites2) ds)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL invites) ds) '2011-01-01'))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ invites
+ TableScan
+ alias: invites
+ Filter Operator
+ predicate:
+ expr: (ds = '2011-01-01')
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: ds
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: ds
+ type: string
+ tag: 0
+ value expressions:
+ expr: foo
+ type: int
+ expr: bar
+ type: string
+ expr: ds
+ type: string
+ invites2
+ TableScan
+ alias: invites2
+ Filter Operator
+ predicate:
+ expr: (ds = '2011-01-01')
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: ds
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: ds
+ type: string
+ tag: 1
+ value expressions:
+ expr: foo
+ type: int
+ expr: bar
+ type: string
+ expr: ds
+ type: string
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ 1 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ expr: _col5
+ type: int
+ expr: _col6
+ type: string
+ expr: _col7
+ type: string
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: explain select * from (select a.foo, b.bar from invites a join invites2 b on a.foo=b.foo) c join invites d on c.foo=d.bar where d.bar='10'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select * from (select a.foo, b.bar from invites a join invites2 b on a.foo=b.foo) c join invites d on c.foo=d.bar where d.bar='10'
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME invites) a) (TOK_TABREF (TOK_TABNAME invites2) b) (= (. (TOK_TABLE_OR_COL a) foo) (. (TOK_TABLE_OR_COL b) foo)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) foo)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) bar))))) c) (TOK_TABREF (TOK_TABNAME invites) d) (= (. (TOK_TABLE_OR_COL c) foo) (. (TOK_TABLE_OR_COL d) bar)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL d) bar) '10'))))
+
+STAGE DEPENDENCIES:
+ Stage-2 is a root stage
+ Stage-1 depends on stages: Stage-2
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-2
+ Map Reduce
+ Alias -> Map Operator Tree:
+ c:a
+ TableScan
+ alias: a
+ Filter Operator
+ predicate:
+ expr: (foo = '10')
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: foo
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: foo
+ type: int
+ tag: 0
+ value expressions:
+ expr: foo
+ type: int
+ c:b
+ TableScan
+ alias: b
+ Filter Operator
+ predicate:
+ expr: (foo = '10')
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: foo
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: foo
+ type: int
+ tag: 1
+ value expressions:
+ expr: bar
+ type: string
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0 {VALUE._col0}
+ 1 {VALUE._col1}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col6
+ Filter Operator
+ predicate:
+ expr: (_col0 = '10')
+ type: boolean
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ expr: _col6
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ $INTNAME
+ Reduce Output Operator
+ key expressions:
+ expr: UDFToDouble(_col0)
+ type: double
+ sort order: +
+ Map-reduce partition columns:
+ expr: UDFToDouble(_col0)
+ type: double
+ tag: 0
+ value expressions:
+ expr: _col0
+ type: int
+ expr: _col1
+ type: string
+ d
+ TableScan
+ alias: d
+ Filter Operator
+ predicate:
+ expr: (bar = '10')
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: UDFToDouble(bar)
+ type: double
+ sort order: +
+ Map-reduce partition columns:
+ expr: UDFToDouble(bar)
+ type: double
+ tag: 1
+ value expressions:
+ expr: foo
+ type: int
+ expr: bar
+ type: string
+ expr: ds
+ type: string
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0 {VALUE._col0} {VALUE._col1}
+ 1 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ expr: _col1
+ type: string
+ expr: _col2
+ type: int
+ expr: _col3
+ type: string
+ expr: _col4
+ type: string
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: drop table invites
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@invites
+PREHOOK: Output: default@invites
+POSTHOOK: query: drop table invites
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@invites
+POSTHOOK: Output: default@invites
+PREHOOK: query: drop table invites2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@invites2
+PREHOOK: Output: default@invites2
+POSTHOOK: query: drop table invites2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@invites2
+POSTHOOK: Output: default@invites2
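For reference, the statements exercised by this new golden file can be read off the PREHOOK query lines above; the corresponding ppd_transitivity.q test script presumably contains the following (the .q file itself is not part of this hunk, so treat this as a reconstruction):

  drop table invites;
  drop table invites2;
  create table invites (foo int, bar string) partitioned by (ds string);
  create table invites2 (foo int, bar string) partitioned by (ds string);
  explain select count(*) from invites join invites2 on invites.ds=invites2.ds where invites.ds='2011-01-01';
  explain select * from invites join invites2 on invites.ds=invites2.ds where invites.ds='2011-01-01';
  explain select * from (select a.foo, b.bar from invites a join invites2 b on a.foo=b.foo) c join invites d on c.foo=d.bar where d.bar='10';
  drop table invites;
  drop table invites2;

In each plan the equality in the join condition lets the WHERE clause on one alias be applied to the other as well: the (ds = '2011-01-01') filter appears under both invites and invites2, and in the subquery case the d.bar = '10' filter is propagated through c.foo = d.bar down to both inputs of the inner join on foo.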
Index: ql/src/test/results/clientpositive/ppd_join.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_join.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/ppd_join.q.out (working copy)
@@ -32,7 +32,7 @@
alias: src
Filter Operator
predicate:
- expr: (((key > '1') and (key < '400')) and ((key > '20') and ((value < 'val_50') or (key > '2'))))
+ expr: ((((key > '1') and (key < '400')) and ((key > '20') and ((value < 'val_50') or (key > '2')))) and (key <> '4'))
type: boolean
Filter Operator
predicate:
@@ -136,7 +136,7 @@
WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4')
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-19-54_404_4275400666711143229/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-37-21_878_9027559817451359309/-mr-10000
POSTHOOK: query: SELECT src1.c1, src2.c4
FROM
(SELECT src.key as c1, src.value as c2 from src where src.key > '1' ) src1
@@ -146,7 +146,7 @@
WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4')
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-19-54_404_4275400666711143229/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-37-21_878_9027559817451359309/-mr-10000
200 val_200
200 val_200
200 val_200
@@ -599,7 +599,7 @@
alias: src
Filter Operator
predicate:
- expr: (((key > '1') and (key < '400')) and ((key > '20') and ((value < 'val_50') or (key > '2'))))
+ expr: ((((key > '1') and (key < '400')) and ((key > '20') and ((value < 'val_50') or (key > '2')))) and (key <> '4'))
type: boolean
Select Operator
expressions:
@@ -691,7 +691,7 @@
WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4')
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-20-04_832_4076756605206612832/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-37-25_378_7268215206395812410/-mr-10000
POSTHOOK: query: SELECT src1.c1, src2.c4
FROM
(SELECT src.key as c1, src.value as c2 from src where src.key > '1' ) src1
@@ -701,7 +701,7 @@
WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4')
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-20-04_832_4076756605206612832/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-37-25_378_7268215206395812410/-mr-10000
200 val_200
200 val_200
200 val_200
Index: ql/src/test/results/clientpositive/smb_mapjoin_6.q.out
===================================================================
--- ql/src/test/results/clientpositive/smb_mapjoin_6.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/smb_mapjoin_6.q.out (working copy)
@@ -2611,29 +2611,22 @@
b
TableScan
alias: b
- Sorted Merge Bucket Map Join Operator
- condition map:
- Inner Join 0 to 1
- condition expressions:
- 0 {key} {value}
- 1 {key} {value}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[key]]
- outputColumnNames: _col0, _col1, _col4, _col5
- Position of Big Table: 1
- Select Operator
- expressions:
- expr: _col0
- type: int
- expr: _col1
- type: string
- expr: _col4
- type: int
- expr: _col5
- type: string
+ Filter Operator
+ predicate:
+ expr: (key > 1000)
+ type: boolean
+ Sorted Merge Bucket Map Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0 {key} {value}
+ 1 {key} {value}
+ handleSkewJoin: false
+ keys:
+ 0 [Column[key]]
+ 1 [Column[key]]
outputColumnNames: _col0, _col1, _col4, _col5
+ Position of Big Table: 1
Select Operator
expressions:
expr: _col0
@@ -2644,15 +2637,26 @@
type: int
expr: _col5
type: string
- outputColumnNames: _col0, _col1, _col2, _col3
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.smb_join_results
+ outputColumnNames: _col0, _col1, _col4, _col5
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ expr: _col1
+ type: string
+ expr: _col4
+ type: int
+ expr: _col5
+ type: string
+ outputColumnNames: _col0, _col1, _col2, _col3
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.smb_join_results
Stage: Stage-0
Move Operator
Index: ql/src/test/results/clientpositive/ppd_join2.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_join2.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/ppd_join2.q.out (working copy)
@@ -39,7 +39,7 @@
alias: src
Filter Operator
predicate:
- expr: (((key <> '302') and (key < '400')) and ((key <> '311') and ((value <> 'val_50') or (key > '1'))))
+ expr: ((((key <> '302') and (key < '400')) and ((key <> '311') and ((value <> 'val_50') or (key > '1')))) and (key <> '14'))
type: boolean
Filter Operator
predicate:
@@ -214,7 +214,7 @@
WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13)
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-22-23_609_8022409171851868425/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-37-29_025_5103018521807584338/-mr-10000
POSTHOOK: query: SELECT src1.c1, src2.c4
FROM
(SELECT src.key as c1, src.value as c2 from src where src.key <> '302' ) src1
@@ -227,7 +227,7 @@
WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-22-23_609_8022409171851868425/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-37-29_025_5103018521807584338/-mr-10000
0 val_0
0 val_0
0 val_0
@@ -1787,7 +1787,7 @@
alias: src
Filter Operator
predicate:
- expr: (((key <> '302') and (key < '400')) and ((key <> '311') and ((value <> 'val_50') or (key > '1'))))
+ expr: ((((key <> '302') and (key < '400')) and ((key <> '311') and ((value <> 'val_50') or (key > '1')))) and (key <> '14'))
type: boolean
Select Operator
expressions:
@@ -1946,7 +1946,7 @@
WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13)
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-22-44_680_6270114164732441996/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-37-35_716_6205094110096605618/-mr-10000
POSTHOOK: query: SELECT src1.c1, src2.c4
FROM
(SELECT src.key as c1, src.value as c2 from src where src.key <> '302' ) src1
@@ -1959,7 +1959,7 @@
WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-22-44_680_6270114164732441996/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-37-35_716_6205094110096605618/-mr-10000
0 val_0
0 val_0
0 val_0
Index: ql/src/test/results/clientpositive/index_auto_mult_tables_compact.q.out
===================================================================
--- ql/src/test/results/clientpositive/index_auto_mult_tables_compact.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/index_auto_mult_tables_compact.q.out (working copy)
@@ -25,7 +25,7 @@
alias: a
Filter Operator
predicate:
- expr: ((key > 80) and (key < 100))
+ expr: ((((key > 80) and (key < 100)) and (key > 70)) and (key < 90))
type: boolean
Reduce Output Operator
key expressions:
@@ -46,7 +46,7 @@
alias: b
Filter Operator
predicate:
- expr: ((key > 70) and (key < 90))
+ expr: (((key > 70) and (key < 90)) and ((key > 80) and (key < 100)))
type: boolean
Reduce Output Operator
key expressions:
@@ -86,7 +86,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/salbiz/hive_2011-08-03_11-41-42_789_1684955694193338986/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_14-27-47_526_7203999924526656619/-mr-10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -119,7 +119,7 @@
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-41-42_970_2859185952217320568/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-27-47_643_4004131501649720125/-mr-10000
POSTHOOK: query: SELECT a.key, a.value FROM src a JOIN srcpart b ON (a.key = b.key) WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90 ORDER BY a.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
@@ -127,7 +127,7 @@
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-POSTHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-41-42_970_2859185952217320568/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-27-47_643_4004131501649720125/-mr-10000
82 val_82
82 val_82
82 val_82
@@ -262,11 +262,11 @@
TableScan
alias: default__srcpart_srcpart_index__
filterExpr:
- expr: ((key > 70) and (key < 90))
+ expr: ((((key > 70) and (key < 90)) and (key > 80)) and (key < 100))
type: boolean
Filter Operator
predicate:
- expr: ((key > 70) and (key < 90))
+ expr: ((((key > 70) and (key < 90)) and (key > 80)) and (key < 100))
type: boolean
Select Operator
expressions:
@@ -289,13 +289,13 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/salbiz/apache-hive/build/ql/scratchdir/hive_2011-08-03_11-42-13_662_1841905286836798924/-ext-10000
+ destination: file:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_14-28-17_787_983414498423360825/-ext-10000
Stage: Stage-4
Move Operator
files:
hdfs directory: true
- destination: file:/tmp/salbiz/hive_2011-08-03_11-42-12_993_7173516009867952993/-mr-10003
+ destination: file:/tmp/charleschen/hive_2011-08-04_14-28-17_285_9185686871514219301/-mr-10003
Stage: Stage-1
Map Reduce
@@ -304,11 +304,11 @@
TableScan
alias: a
filterExpr:
- expr: ((key > 80) and (key < 100))
+ expr: ((((key > 80) and (key < 100)) and (key > 70)) and (key < 90))
type: boolean
Filter Operator
predicate:
- expr: ((key > 80) and (key < 100))
+ expr: ((((key > 80) and (key < 100)) and (key > 70)) and (key < 90))
type: boolean
Reduce Output Operator
key expressions:
@@ -328,11 +328,11 @@
TableScan
alias: b
filterExpr:
- expr: ((key > 70) and (key < 90))
+ expr: (((key > 70) and (key < 90)) and ((key > 80) and (key < 100)))
type: boolean
Filter Operator
predicate:
- expr: ((key > 70) and (key < 90))
+ expr: (((key > 70) and (key < 90)) and ((key > 80) and (key < 100)))
type: boolean
Reduce Output Operator
key expressions:
@@ -372,7 +372,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/salbiz/hive_2011-08-03_11-42-12_993_7173516009867952993/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_14-28-17_285_9185686871514219301/-mr-10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -396,7 +396,7 @@
Stage: Stage-6
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/salbiz/apache-hive/build/ql/scratchdir/hive_2011-08-03_11-42-13_662_1841905286836798924/-ext-10001
+ file:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_14-28-17_787_983414498423360825/-ext-10001
File Output Operator
compressed: false
GlobalTableId: 0
@@ -411,11 +411,11 @@
TableScan
alias: default__src_src_index__
filterExpr:
- expr: ((key > 80) and (key < 100))
+ expr: ((((key > 80) and (key < 100)) and (key > 70)) and (key < 90))
type: boolean
Filter Operator
predicate:
- expr: ((key > 80) and (key < 100))
+ expr: ((((key > 80) and (key < 100)) and (key > 70)) and (key < 90))
type: boolean
Select Operator
expressions:
@@ -438,18 +438,18 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/salbiz/apache-hive/build/ql/scratchdir/hive_2011-08-03_11-42-13_773_6752137651963996196/-ext-10000
+ destination: file:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_14-28-17_892_524997479658189780/-ext-10000
Stage: Stage-9
Move Operator
files:
hdfs directory: true
- destination: file:/tmp/salbiz/hive_2011-08-03_11-42-12_993_7173516009867952993/-mr-10004
+ destination: file:/tmp/charleschen/hive_2011-08-04_14-28-17_285_9185686871514219301/-mr-10004
Stage: Stage-11
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/salbiz/apache-hive/build/ql/scratchdir/hive_2011-08-03_11-42-13_773_6752137651963996196/-ext-10001
+ file:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_14-28-17_892_524997479658189780/-ext-10001
File Output Operator
compressed: false
GlobalTableId: 0
@@ -474,7 +474,7 @@
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-42-13_878_115379500838443509/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-28-17_969_2882602825126707252/-mr-10000
POSTHOOK: query: SELECT a.key, a.value FROM src a JOIN srcpart b ON (a.key = b.key) WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90 ORDER BY a.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@default__src_src_index__
@@ -487,7 +487,7 @@
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-POSTHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-42-13_878_115379500838443509/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-28-17_969_2882602825126707252/-mr-10000
POSTHOOK: Lineage: default__src_src_index__._bucketname SIMPLE [(src)src.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ]
POSTHOOK: Lineage: default__src_src_index__._offsets EXPRESSION [(src)src.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ]
POSTHOOK: Lineage: default__src_src_index__.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
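The index_auto_mult_tables_compact changes follow from the same mechanism: because of a.key = b.key, each alias now carries the other alias's range predicate as well, and it is this combined predicate that surfaces in filterExpr and is handed to the automatic index rewrite. A compact way to see it (illustration only, using the same query as the test above):

  EXPLAIN
  SELECT a.key, a.value
  FROM src a JOIN srcpart b ON (a.key = b.key)
  WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90
  ORDER BY a.key;
  -- expected filter on alias a: ((((key > 80) and (key < 100)) and (key > 70)) and (key < 90))
  -- expected filter on alias b: (((key > 70) and (key < 90)) and ((key > 80) and (key < 100)))
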
Index: ql/src/test/results/clientpositive/ppd_outer_join4.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_outer_join4.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/ppd_outer_join4.q.out (working copy)
@@ -36,20 +36,24 @@
a
TableScan
alias: a
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 0
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Filter Operator
+ predicate:
+ expr: (sqrt(key) <> 13)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 0
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
b
TableScan
alias: b
@@ -70,18 +74,22 @@
c
TableScan
alias: c
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 2
- value expressions:
- expr: key
- type: string
+ Filter Operator
+ predicate:
+ expr: (sqrt(key) <> 13)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 2
+ value expressions:
+ expr: key
+ type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -134,7 +142,7 @@
WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-38-11_041_8830294243573092446/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-26_298_4211451973972360042/-mr-10000
POSTHOOK: query: FROM
src a
LEFT OUTER JOIN
@@ -147,7 +155,7 @@
WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-38-11_041_8830294243573092446/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-26_298_4211451973972360042/-mr-10000
150 val_150 150 val_150 150
152 val_152 152 val_152 152
152 val_152 152 val_152 152
@@ -416,20 +424,24 @@
a
TableScan
alias: a
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 0
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Filter Operator
+ predicate:
+ expr: (sqrt(key) <> 13)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 0
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
b
TableScan
alias: b
@@ -450,18 +462,22 @@
c
TableScan
alias: c
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 2
- value expressions:
- expr: key
- type: string
+ Filter Operator
+ predicate:
+ expr: (sqrt(key) <> 13)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 2
+ value expressions:
+ expr: key
+ type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -475,7 +491,7 @@
outputColumnNames: _col0, _col1, _col4, _col5, _col8
Filter Operator
predicate:
- expr: ((((_col4 > '15') and (_col4 < '25')) and (sqrt(_col8) <> 13)) and ((_col0 > '10') and (_col0 < '20')))
+ expr: (((_col4 > '15') and (_col4 < '25')) and ((_col0 > '10') and (_col0 < '20')))
type: boolean
Select Operator
expressions:
@@ -514,7 +530,7 @@
WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-38-20_602_416360783321217123/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-29_741_6458783038038362011/-mr-10000
POSTHOOK: query: FROM
src a
LEFT OUTER JOIN
@@ -527,7 +543,7 @@
WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-38-20_602_416360783321217123/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-29_741_6458783038038362011/-mr-10000
150 val_150 150 val_150 150
152 val_152 152 val_152 152
152 val_152 152 val_152 152
Index: ql/src/test/results/clientpositive/louter_join_ppr.q.out
===================================================================
--- ql/src/test/results/clientpositive/louter_join_ppr.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/louter_join_ppr.q.out (working copy)
@@ -54,27 +54,32 @@
TableScan
alias: b
GatherStats: false
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Filter Operator
+ isSamplingPred: false
+ predicate:
+ expr: ((key > 10) and (key < 20))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
Needs Tagging: true
Path -> Alias:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [a]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b]
Path -> Partition:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
Partition
base file name: src
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -85,12 +90,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -101,16 +106,16 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
name: default.src
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -124,13 +129,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490002
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -141,17 +146,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -165,13 +170,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490003
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -182,13 +187,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
@@ -220,9 +225,9 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/tmp/sdong/hive_2011-02-10_16-37-11_623_3702980397436579388/-ext-10001
+ directory: file:/tmp/charleschen/hive_2011-08-04_15-18-56_329_8878814452815796293/-ext-10001
NumFilesPerFileSink: 1
- Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_16-37-11_623_3702980397436579388/-ext-10001/
+ Stats Publishing Key Prefix: file:/tmp/charleschen/hive_2011-08-04_15-18-56_329_8878814452815796293/-ext-10001/
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -250,7 +255,7 @@
PREHOOK: Input: default@src
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-37-11_769_4736674847744577829/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-18-56_447_2216911373848024406/-mr-10000
POSTHOOK: query: FROM
src a
LEFT OUTER JOIN
@@ -262,7 +267,7 @@
POSTHOOK: Input: default@src
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-37-11_769_4736674847744577829/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-18-56_447_2216911373848024406/-mr-10000
17 val_17 17 val_17
17 val_17 17 val_17
18 val_18 18 val_18
@@ -333,29 +338,34 @@
TableScan
alias: b
GatherStats: false
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Filter Operator
+ isSamplingPred: false
+ predicate:
+ expr: ((key > 10) and (key < 20))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
Needs Tagging: true
Path -> Alias:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [b]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [a]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [a]
Path -> Partition:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
Partition
base file name: src
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -366,12 +376,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -382,16 +392,16 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
name: default.src
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -405,13 +415,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490002
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -422,17 +432,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -446,13 +456,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490003
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -463,17 +473,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -487,13 +497,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490003
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -504,17 +514,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -528,13 +538,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490004
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -545,13 +555,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
@@ -586,9 +596,9 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/tmp/sdong/hive_2011-02-10_16-37-15_926_891674243391284398/-ext-10001
+ directory: file:/tmp/charleschen/hive_2011-08-04_15-19-00_059_330569950400004347/-ext-10001
NumFilesPerFileSink: 1
- Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_16-37-15_926_891674243391284398/-ext-10001/
+ Stats Publishing Key Prefix: file:/tmp/charleschen/hive_2011-08-04_15-19-00_059_330569950400004347/-ext-10001/
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -618,7 +628,7 @@
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-37-16_088_6735389241100001878/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-19-00_178_145796059542659199/-mr-10000
POSTHOOK: query: FROM
srcpart a
LEFT OUTER JOIN
@@ -632,7 +642,7 @@
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-37-16_088_6735389241100001878/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-19-00_178_145796059542659199/-mr-10000
17 val_17 17 val_17
17 val_17 17 val_17
18 val_18 18 val_18
@@ -701,31 +711,36 @@
TableScan
alias: b
GatherStats: false
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
- expr: ds
- type: string
+ Filter Operator
+ isSamplingPred: false
+ predicate:
+ expr: ((key > 10) and (key < 20))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ expr: ds
+ type: string
Needs Tagging: true
Path -> Alias:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [a]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [b]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [b]
Path -> Partition:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
Partition
base file name: src
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -736,12 +751,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -752,16 +767,16 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
name: default.src
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -775,13 +790,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490002
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -792,17 +807,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -816,13 +831,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490003
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -833,17 +848,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -857,13 +872,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490003
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -874,17 +889,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -898,13 +913,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490004
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -915,13 +930,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
@@ -953,9 +968,9 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/tmp/sdong/hive_2011-02-10_16-37-20_683_2855828289939844521/-ext-10001
+ directory: file:/tmp/charleschen/hive_2011-08-04_15-19-04_293_2019450028716412333/-ext-10001
NumFilesPerFileSink: 1
- Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_16-37-20_683_2855828289939844521/-ext-10001/
+ Stats Publishing Key Prefix: file:/tmp/charleschen/hive_2011-08-04_15-19-04_293_2019450028716412333/-ext-10001/
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -985,7 +1000,7 @@
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-37-20_843_3945565438922606221/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-19-04_412_4061146638250844403/-mr-10000
POSTHOOK: query: FROM
src a
LEFT OUTER JOIN
@@ -999,7 +1014,7 @@
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-37-20_843_3945565438922606221/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-19-04_412_4061146638250844403/-mr-10000
17 val_17 17 val_17
17 val_17 17 val_17
18 val_18 18 val_18
@@ -1070,27 +1085,32 @@
TableScan
alias: b
GatherStats: false
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Filter Operator
+ isSamplingPred: false
+ predicate:
+ expr: ((key > 10) and (key < 20))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
Needs Tagging: true
Path -> Alias:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [b]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src [b]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
Path -> Partition:
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
Partition
base file name: src
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1101,12 +1121,12 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1117,16 +1137,16 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
name default.src
serialization.ddl struct src { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378968
+ transient_lastDdlTime 1312490007
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src
name: default.src
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1140,13 +1160,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490002
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1157,17 +1177,17 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1181,13 +1201,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490003
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1198,13 +1218,13 @@
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/srcpart
name default.srcpart
partition_columns ds/hr
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1297378962
+ transient_lastDdlTime 1312490001
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
@@ -1236,9 +1256,9 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/tmp/sdong/hive_2011-02-10_16-37-27_062_688763315397090305/-ext-10001
+ directory: file:/tmp/charleschen/hive_2011-08-04_15-19-08_342_9151992816400510626/-ext-10001
NumFilesPerFileSink: 1
- Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_16-37-27_062_688763315397090305/-ext-10001/
+ Stats Publishing Key Prefix: file:/tmp/charleschen/hive_2011-08-04_15-19-08_342_9151992816400510626/-ext-10001/
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1266,7 +1286,7 @@
PREHOOK: Input: default@src
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-37-27_212_5521214912289806758/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-19-08_455_1698430462858537515/-mr-10000
POSTHOOK: query: FROM
srcpart a
LEFT OUTER JOIN
@@ -1278,7 +1298,7 @@
POSTHOOK: Input: default@src
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-37-27_212_5521214912289806758/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-19-08_455_1698430462858537515/-mr-10000
17 val_17 17 val_17
17 val_17 17 val_17
18 val_18 18 val_18
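The plan changes in this file all follow one pattern: the range that the WHERE clause places on one side of the equijoin, (key > 10) and (key < 20), now also appears as a Filter Operator under alias b ahead of its Reduce Output Operator, because the join on key lets the same bounds be applied to b before the shuffle. The SELECT list and the full WHERE clause fall outside the hunks shown here, so the HiveQL below is only a hedged sketch of the query shape being exercised, with the aliases and bounds taken from the plan fragments above:

  -- Sketch only; table roles and predicates inferred from the Path -> Alias and Filter Operator lines above
  SELECT a.*, b.*
  FROM src a
  LEFT OUTER JOIN srcpart b ON (a.key = b.key)
  WHERE a.key > 10 AND a.key < 20;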
Index: ql/src/test/results/clientpositive/ppd_join3.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_join3.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/ppd_join3.q.out (working copy)
@@ -38,7 +38,7 @@
alias: src
Filter Operator
predicate:
- expr: (((key <> '11') and (key < '400')) and ((key > '0') and ((value <> 'val_500') or (key > '1'))))
+ expr: (((((key <> '11') and (key < '400')) and ((key > '0') and ((value <> 'val_500') or (key > '1')))) and (key <> '4')) and (key <> '1'))
type: boolean
Filter Operator
predicate:
@@ -175,7 +175,7 @@
WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1')
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-25-08_238_6256269886850027827/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-37-42_577_6662979822128923090/-mr-10000
POSTHOOK: query: SELECT src1.c1, src2.c4
FROM
(SELECT src.key as c1, src.value as c2 from src where src.key <> '11' ) src1
@@ -188,7 +188,7 @@
WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1')
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-25-08_238_6256269886850027827/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-37-42_577_6662979822128923090/-mr-10000
100 val_100
100 val_100
100 val_100
@@ -1804,7 +1804,7 @@
alias: src
Filter Operator
predicate:
- expr: (((key <> '11') and (key < '400')) and ((key > '0') and ((value <> 'val_500') or (key > '1'))))
+ expr: (((((key <> '11') and (key < '400')) and ((key > '0') and ((value <> 'val_500') or (key > '1')))) and (key <> '4')) and (key <> '1'))
type: boolean
Select Operator
expressions:
@@ -1925,7 +1925,7 @@
WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1')
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-25-20_069_5364583495609185769/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-37-46_379_5715042014855774161/-mr-10000
POSTHOOK: query: SELECT src1.c1, src2.c4
FROM
(SELECT src.key as c1, src.value as c2 from src where src.key <> '11' ) src1
@@ -1938,7 +1938,7 @@
WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1')
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-25-20_069_5364583495609185769/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-37-46_379_5715042014855774161/-mr-10000
100 val_100
100 val_100
100 val_100
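Here the propagation crosses a chain of self-joins of src on the key column: the single-table conditions (src2.c3 <> '4') and (src3.c5 <> '1') from the WHERE clause are folded into the scan filter of the first input as well, which is why its Filter Operator predicate grows by (key <> '4') and (key <> '1'). The (key < '400') bound already present in that filter comes from a part of the query outside these hunks. A simplified, hedged rendering of the query shape follows; only the src1 subquery and the WHERE clause are visible above, so the inner shapes of src2 and src3 and the join keys are assumptions:

  -- Simplified sketch; src2/src3 subqueries and ON clauses are placeholders
  SELECT src1.c1, src2.c4
  FROM (SELECT src.key AS c1, src.value AS c2 FROM src WHERE src.key <> '11') src1
  JOIN (SELECT src.key AS c3, src.value AS c4 FROM src) src2 ON src1.c1 = src2.c3
  JOIN (SELECT src.key AS c5 FROM src) src3 ON src1.c1 = src3.c5
  WHERE src1.c1 > '0'
    AND (src1.c2 <> 'val_500' OR src1.c1 > '1')
    AND (src2.c3 > '10' OR src1.c1 <> '10')
    AND src2.c3 <> '4'
    AND src3.c5 <> '1';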
Index: ql/src/test/results/clientpositive/smb_mapjoin9.q.out
===================================================================
--- ql/src/test/results/clientpositive/smb_mapjoin9.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/smb_mapjoin9.q.out (working copy)
@@ -63,54 +63,58 @@
a
TableScan
alias: a
- Sorted Merge Bucket Map Join Operator
- condition map:
- Inner Join 0 to 1
- condition expressions:
- 0 {key} {ds}
- 1 {key} {value} {ds}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[key]]
- outputColumnNames: _col0, _col2, _col5, _col6, _col7
- Position of Big Table: 0
- Select Operator
- expressions:
- expr: _col0
- type: int
- expr: _col2
- type: string
- expr: _col5
- type: int
- expr: _col6
- type: string
- expr: _col7
- type: string
+ Filter Operator
+ predicate:
+ expr: key is not null
+ type: boolean
+ Sorted Merge Bucket Map Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0 {key} {ds}
+ 1 {key} {value} {ds}
+ handleSkewJoin: false
+ keys:
+ 0 [Column[key]]
+ 1 [Column[key]]
outputColumnNames: _col0, _col2, _col5, _col6, _col7
+ Position of Big Table: 0
Select Operator
expressions:
+ expr: _col0
+ type: int
+ expr: _col2
+ type: string
expr: _col5
type: int
expr: _col6
type: string
expr: _col7
type: string
- expr: _col0
- type: int
- outputColumnNames: _col0, _col1, _col2, _col3
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ outputColumnNames: _col0, _col2, _col5, _col6, _col7
+ Select Operator
+ expressions:
+ expr: _col5
+ type: int
+ expr: _col6
+ type: string
+ expr: _col7
+ type: string
+ expr: _col0
+ type: int
+ outputColumnNames: _col0, _col1, _col2, _col3
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Stage: Stage-0
Move Operator
files:
hdfs directory: true
- destination: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/smb_mapjoin9_results
+ destination: pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/smb_mapjoin9_results
Stage: Stage-3
Create Table Operator:
Index: ql/src/test/results/clientpositive/auto_join16.q.out
===================================================================
--- ql/src/test/results/clientpositive/auto_join16.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/auto_join16.q.out (working copy)
@@ -63,7 +63,7 @@
alias: a
Filter Operator
predicate:
- expr: ((key > 10) and (key > 20))
+ expr: (((key > 10) and (key > 20)) and (value < 200))
type: boolean
Select Operator
expressions:
@@ -109,7 +109,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/sdong/hive_2011-02-10_01-14-34_877_5596761155901891451/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_13-40-09_960_3421937651195483585/-mr-10002
Reduce Output Operator
sort order:
tag: -1
@@ -147,7 +147,7 @@
alias: a
Filter Operator
predicate:
- expr: ((key > 10) and (key > 20))
+ expr: (((key > 10) and (key > 20)) and (value < 200))
type: boolean
Select Operator
expressions:
@@ -218,7 +218,7 @@
alias: a
Filter Operator
predicate:
- expr: ((key > 10) and (key > 20))
+ expr: (((key > 10) and (key > 20)) and (value < 200))
type: boolean
Select Operator
expressions:
@@ -308,7 +308,7 @@
where tab.value < 200
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-14-35_133_7289459318323276940/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_13-40-10_383_7393462303376577042/-mr-10000
POSTHOOK: query: SELECT sum(hash(subq.key, tab.value))
FROM
(select a.key, a.value from src a where a.key > 10 ) subq
@@ -317,5 +317,5 @@
where tab.value < 200
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-14-35_133_7289459318323276940/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_13-40-10_383_7393462303376577042/-mr-10000
NULL
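In auto_join16 the inferred filter involves a column other than the join key: alias a's predicate grows from ((key > 10) and (key > 20)) to also include (value < 200), the condition the query states on tab.value. That only works if the join also equates the value columns, so the ON clause below is an assumption (it is not visible in these hunks); the outer SELECT, the subquery, and the WHERE clause are quoted from the test output above:

  -- ON clause assumed; the rest follows the query text shown in the PREHOOK/POSTHOOK lines
  SELECT sum(hash(subq.key, tab.value))
  FROM (SELECT a.key, a.value FROM src a WHERE a.key > 10) subq
  JOIN src tab ON (subq.key = tab.key AND subq.value = tab.value)
  WHERE tab.value < 200;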
Index: ql/src/test/results/clientpositive/index_auto_mult_tables.q.out
===================================================================
--- ql/src/test/results/clientpositive/index_auto_mult_tables.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/index_auto_mult_tables.q.out (working copy)
@@ -25,7 +25,7 @@
alias: a
Filter Operator
predicate:
- expr: ((key > 80) and (key < 100))
+ expr: ((((key > 80) and (key < 100)) and (key > 70)) and (key < 90))
type: boolean
Reduce Output Operator
key expressions:
@@ -46,7 +46,7 @@
alias: b
Filter Operator
predicate:
- expr: ((key > 70) and (key < 90))
+ expr: (((key > 70) and (key < 90)) and ((key > 80) and (key < 100)))
type: boolean
Reduce Output Operator
key expressions:
@@ -86,7 +86,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/salbiz/hive_2011-08-03_11-40-52_874_7254064139212481338/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_14-27-00_740_6429355123718677789/-mr-10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -119,7 +119,7 @@
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-40-53_275_1751206294928260840/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-27-00_868_4166031369010931537/-mr-10000
POSTHOOK: query: SELECT a.key, a.value FROM src a JOIN srcpart b ON (a.key = b.key) WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90 ORDER BY a.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
@@ -127,7 +127,7 @@
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-POSTHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-40-53_275_1751206294928260840/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-27-00_868_4166031369010931537/-mr-10000
82 val_82
82 val_82
82 val_82
@@ -260,11 +260,11 @@
TableScan
alias: default__srcpart_srcpart_index__
filterExpr:
- expr: (((key > 70) and (key < 90)) and (not EWAH_BITMAP_EMPTY(_bitmaps)))
+ expr: (((((key > 70) and (key < 90)) and (key > 80)) and (key < 100)) and (not EWAH_BITMAP_EMPTY(_bitmaps)))
type: boolean
Filter Operator
predicate:
- expr: (((key > 70) and (key < 90)) and (not EWAH_BITMAP_EMPTY(_bitmaps)))
+ expr: (((((key > 70) and (key < 90)) and (key > 80)) and (key < 100)) and (not EWAH_BITMAP_EMPTY(_bitmaps)))
type: boolean
Select Operator
expressions:
@@ -338,7 +338,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/tmp/salbiz/hive_2011-08-03_11-41-25_407_3322108409707049422/-mr-10003
+ destination: file:/tmp/charleschen/hive_2011-08-04_14-27-30_496_5864327664936373988/-mr-10003
Stage: Stage-1
Map Reduce
@@ -347,11 +347,11 @@
TableScan
alias: a
filterExpr:
- expr: ((key > 80) and (key < 100))
+ expr: ((((key > 80) and (key < 100)) and (key > 70)) and (key < 90))
type: boolean
Filter Operator
predicate:
- expr: ((key > 80) and (key < 100))
+ expr: ((((key > 80) and (key < 100)) and (key > 70)) and (key < 90))
type: boolean
Reduce Output Operator
key expressions:
@@ -371,11 +371,11 @@
TableScan
alias: b
filterExpr:
- expr: ((key > 70) and (key < 90))
+ expr: (((key > 70) and (key < 90)) and ((key > 80) and (key < 100)))
type: boolean
Filter Operator
predicate:
- expr: ((key > 70) and (key < 90))
+ expr: (((key > 70) and (key < 90)) and ((key > 80) and (key < 100)))
type: boolean
Reduce Output Operator
key expressions:
@@ -415,7 +415,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/salbiz/hive_2011-08-03_11-41-25_407_3322108409707049422/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_14-27-30_496_5864327664936373988/-mr-10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -443,11 +443,11 @@
TableScan
alias: default__src_src_index__
filterExpr:
- expr: (((key > 80) and (key < 100)) and (not EWAH_BITMAP_EMPTY(_bitmaps)))
+ expr: (((((key > 80) and (key < 100)) and (key > 70)) and (key < 90)) and (not EWAH_BITMAP_EMPTY(_bitmaps)))
type: boolean
Filter Operator
predicate:
- expr: (((key > 80) and (key < 100)) and (not EWAH_BITMAP_EMPTY(_bitmaps)))
+ expr: (((((key > 80) and (key < 100)) and (key > 70)) and (key < 90)) and (not EWAH_BITMAP_EMPTY(_bitmaps)))
type: boolean
Select Operator
expressions:
@@ -521,7 +521,7 @@
Move Operator
files:
hdfs directory: true
- destination: file:/tmp/salbiz/hive_2011-08-03_11-41-25_407_3322108409707049422/-mr-10004
+ destination: file:/tmp/charleschen/hive_2011-08-04_14-27-30_496_5864327664936373988/-mr-10004
Stage: Stage-0
Fetch Operator
@@ -540,7 +540,7 @@
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-41-26_398_2468484527777301186/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-27-31_222_2234297474551557012/-mr-10000
POSTHOOK: query: SELECT a.key, a.value FROM src a JOIN srcpart b ON (a.key = b.key) WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90 ORDER BY a.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@default__src_src_index__
@@ -553,7 +553,7 @@
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-POSTHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-41-26_398_2468484527777301186/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_14-27-31_222_2234297474551557012/-mr-10000
POSTHOOK: Lineage: default__src_src_index__._bitmaps EXPRESSION [(src)src.FieldSchema(name:ROW__OFFSET__INSIDE__BLOCK, type:bigint, comment:), ]
POSTHOOK: Lineage: default__src_src_index__._bucketname SIMPLE [(src)src.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ]
POSTHOOK: Lineage: default__src_src_index__._offset SIMPLE [(src)src.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ]
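This file makes the symmetry of the rewrite easy to see because the full test query is quoted in the output: each side of the equijoin now carries the other side's range as well, both in the plain Filter Operators and in the filterExpr pushed into the bitmap-index scans. The query, as it appears in the PREHOOK/POSTHOOK lines above:

  SELECT a.key, a.value
  FROM src a JOIN srcpart b ON (a.key = b.key)
  WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90
  ORDER BY a.key;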
Index: ql/src/test/results/clientpositive/ppd_outer_join1.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_outer_join1.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/ppd_outer_join1.q.out (working copy)
@@ -51,20 +51,24 @@
b
TableScan
alias: b
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Filter Operator
+ predicate:
+ expr: ((key > 10) and (key < 20))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -110,7 +114,7 @@
WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-30-10_517_1449004673525932594/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-05_529_7524832807059785440/-mr-10000
POSTHOOK: query: FROM
src a
LEFT OUTER JOIN
@@ -120,7 +124,7 @@
WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-30-10_517_1449004673525932594/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-05_529_7524832807059785440/-mr-10000
17 val_17 17 val_17
18 val_18 18 val_18
18 val_18 18 val_18
@@ -180,20 +184,24 @@
b
TableScan
alias: b
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Filter Operator
+ predicate:
+ expr: ((key > 10) and (key < 20))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -239,7 +247,7 @@
WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-30-20_062_1274880924259137309/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-08_922_5421203911417883432/-mr-10000
POSTHOOK: query: FROM
src a
LEFT OUTER JOIN
@@ -249,7 +257,7 @@
WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-30-20_062_1274880924259137309/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-08_922_5421203911417883432/-mr-10000
17 val_17 17 val_17
18 val_18 18 val_18
18 val_18 18 val_18
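The same rule applied to a left outer self-join: the bounds the WHERE clause places on the preserved side a, (key > 10) and (key < 20), are replicated onto b before the shuffle, while b's own bounds from the WHERE clause (key > 15, key < 25) do not appear in this pre-join filter. The FROM and WHERE lines are quoted in the output above; the ON clause and SELECT list are elided by the hunks, so they are assumed here:

  -- ON clause and SELECT list assumed; FROM/WHERE taken from the query text above
  SELECT a.key, a.value, b.key, b.value
  FROM src a
  LEFT OUTER JOIN src b ON (a.key = b.key)
  WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25;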
Index: ql/src/test/results/clientpositive/ppd_udf_case.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_udf_case.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/ppd_udf_case.q.out (working copy)
@@ -64,24 +64,28 @@
b
TableScan
alias: b
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
- expr: ds
- type: string
- expr: hr
- type: string
+ Filter Operator
+ predicate:
+ expr: CASE (key) WHEN ('27') THEN (true) WHEN ('38') THEN (false) ELSE (null) END
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ expr: ds
+ type: string
+ expr: hr
+ type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -124,7 +128,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-53-19_011_6078693060764998539/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_15-38-40_124_3085325060954835780/-mr-10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -190,7 +194,7 @@
PREHOOK: type: QUERY
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-53-20_085_7457265695938103038/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-40_241_9100931972676880091/-mr-10000
POSTHOOK: query: SELECT *
FROM srcpart a JOIN srcpart b
ON a.key = b.key
@@ -205,7 +209,7 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-53-20_085_7457265695938103038/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-40_241_9100931972676880091/-mr-10000
27 val_27 2008-04-08 11 27 val_27 2008-04-08 11
27 val_27 2008-04-08 11 27 val_27 2008-04-08 12
27 val_27 2008-04-08 12 27 val_27 2008-04-08 11
@@ -276,24 +280,28 @@
b
TableScan
alias: b
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
- expr: ds
- type: string
- expr: hr
- type: string
+ Filter Operator
+ predicate:
+ expr: CASE (key) WHEN ('27') THEN (true) WHEN ('38') THEN (false) ELSE (null) END
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ expr: ds
+ type: string
+ expr: hr
+ type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -332,7 +340,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-53-40_322_7654615073140246084/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_15-38-46_616_4592989379602081302/-mr-10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -398,7 +406,7 @@
PREHOOK: type: QUERY
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-53-40_840_3692167909003016050/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-46_730_8355573912192239451/-mr-10000
POSTHOOK: query: SELECT *
FROM srcpart a JOIN srcpart b
ON a.key = b.key
@@ -413,7 +421,7 @@
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-53-40_840_3692167909003016050/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_15-38-46_730_8355573912192239451/-mr-10000
27 val_27 2008-04-08 11 27 val_27 2008-04-08 11
27 val_27 2008-04-08 11 27 val_27 2008-04-08 12
27 val_27 2008-04-08 12 27 val_27 2008-04-08 11
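Propagation is not limited to simple comparisons: here the CASE expression the query applies to a.key, CASE (key) WHEN ('27') THEN (true) WHEN ('38') THEN (false) ELSE (null) END, is replicated onto alias b as a pre-shuffle Filter Operator, again justified by the visible join condition a.key = b.key. Only the head of the query appears in these hunks, so the sketch below omits the partition predicate that the PREHOOK: Input lines imply (only the ds=2008-04-08 partitions are read) as well as any ordering clause:

  -- Simplified sketch; partition filter and ordering omitted
  SELECT *
  FROM srcpart a JOIN srcpart b ON a.key = b.key
  WHERE CASE a.key WHEN '27' THEN TRUE WHEN '38' THEN FALSE ELSE NULL END;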
Index: ql/src/test/results/clientpositive/union22.q.out
===================================================================
--- ql/src/test/results/clientpositive/union22.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/union22.q.out (working copy)
@@ -103,7 +103,7 @@
Filter Operator
isSamplingPred: false
predicate:
- expr: (k0 > 50)
+ expr: ((k0 > 50) and (k1 > 20))
type: boolean
Select Operator
expressions:
@@ -157,7 +157,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/tmp/tomasz/hive_2011-06-01_21-10-51_082_6975609978266062150/-mr-10002
+ directory: file:/tmp/charleschen/hive_2011-08-04_16-37-43_403_2940585090717921602/-mr-10002
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -173,9 +173,9 @@
Map Reduce Local Work
Needs Tagging: false
Path -> Alias:
- pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/dst_union22/ds=1 [null-subquery2:subq-subquery2:a]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/dst_union22/ds=1 [null-subquery2:subq-subquery2:a]
Path -> Partition:
- pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/dst_union22/ds=1
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/dst_union22/ds=1
Partition
base file name: ds=1
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -188,7 +188,7 @@
columns.types string:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/dst_union22/ds=1
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/dst_union22/ds=1
name default.dst_union22
numFiles 1
numPartitions 1
@@ -199,7 +199,7 @@
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 11624
- transient_lastDdlTime 1306987846
+ transient_lastDdlTime 1312501059
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -210,7 +210,7 @@
columns.types string:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/dst_union22
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/dst_union22
name default.dst_union22
numFiles 1
numPartitions 1
@@ -221,7 +221,7 @@
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 11624
- transient_lastDdlTime 1306987846
+ transient_lastDdlTime 1312501059
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.dst_union22
name: default.dst_union22
@@ -229,7 +229,7 @@
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/tomasz/hive_2011-06-01_21-10-51_082_6975609978266062150/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_16-37-43_403_2940585090717921602/-mr-10002
Select Operator
expressions:
expr: _col0
@@ -267,10 +267,10 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_03-36-05_942_4742399607666706050/-ext-10000
+ directory: pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_16-37-43_403_2940585090717921602/-ext-10000
NumFilesPerFileSink: 1
Static Partition Specification: ds=2/
- Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_03-36-05_942_4742399607666706050/-ext-10000/
+ Stats Publishing Key Prefix: pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_16-37-43_403_2940585090717921602/-ext-10000/
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -280,7 +280,7 @@
columns.types string:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dst_union22
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/dst_union22
name default.dst_union22
numFiles 1
numPartitions 1
@@ -291,7 +291,7 @@
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 11624
- transient_lastDdlTime 1310380562
+ transient_lastDdlTime 1312501059
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.dst_union22
TotalFiles: 1
@@ -332,10 +332,10 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_21-10-51_082_6975609978266062150/-ext-10000
+ directory: pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_16-37-43_403_2940585090717921602/-ext-10000
NumFilesPerFileSink: 1
Static Partition Specification: ds=2/
- Stats Publishing Key Prefix: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_21-10-51_082_6975609978266062150/-ext-10000/
+ Stats Publishing Key Prefix: pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_16-37-43_403_2940585090717921602/-ext-10000/
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -345,7 +345,7 @@
columns.types string:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/dst_union22
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/dst_union22
name default.dst_union22
numFiles 1
numPartitions 1
@@ -356,7 +356,7 @@
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 11624
- transient_lastDdlTime 1306987846
+ transient_lastDdlTime 1312501059
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.dst_union22
TotalFiles: 1
@@ -364,10 +364,10 @@
MultiFileSpray: false
Needs Tagging: false
Path -> Alias:
- file:/tmp/tomasz/hive_2011-06-01_21-10-51_082_6975609978266062150/-mr-10002 [file:/tmp/tomasz/hive_2011-06-01_21-10-51_082_6975609978266062150/-mr-10002]
- pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/dst_union22_delta/ds=1 [null-subquery1:subq-subquery1:dst_union22_delta]
+ file:/tmp/charleschen/hive_2011-08-04_16-37-43_403_2940585090717921602/-mr-10002 [file:/tmp/charleschen/hive_2011-08-04_16-37-43_403_2940585090717921602/-mr-10002]
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/dst_union22_delta/ds=1 [null-subquery1:subq-subquery1:dst_union22_delta]
Path -> Partition:
- file:/tmp/tomasz/hive_2011-06-01_21-10-51_082_6975609978266062150/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_16-37-43_403_2940585090717921602/-mr-10002
Partition
base file name: -mr-10002
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -383,7 +383,7 @@
columns _col0,_col1,_col10,_col11
columns.types string,string,string,string
escape.delim \
- pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/dst_union22_delta/ds=1
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/dst_union22_delta/ds=1
Partition
base file name: ds=1
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -396,7 +396,7 @@
columns.types string:string:string:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/dst_union22_delta/ds=1
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/dst_union22_delta/ds=1
name default.dst_union22_delta
numFiles 1
numPartitions 1
@@ -407,7 +407,7 @@
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 17436
- transient_lastDdlTime 1306987850
+ transient_lastDdlTime 1312501063
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -418,7 +418,7 @@
columns.types string:string:string:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/dst_union22_delta
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/dst_union22_delta
name default.dst_union22_delta
numFiles 1
numPartitions 1
@@ -429,7 +429,7 @@
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 17436
- transient_lastDdlTime 1306987851
+ transient_lastDdlTime 1312501063
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.dst_union22_delta
name: default.dst_union22_delta
@@ -440,7 +440,7 @@
partition:
ds 2
replace: true
- source: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_21-10-51_082_6975609978266062150/-ext-10000
+ source: pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_16-37-43_403_2940585090717921602/-ext-10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -450,7 +450,7 @@
columns.types string:string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/dst_union22
+ location pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/dst_union22
name default.dst_union22
numFiles 1
numPartitions 1
@@ -461,14 +461,14 @@
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 11624
- transient_lastDdlTime 1306987846
+ transient_lastDdlTime 1312501059
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.dst_union22
- tmp directory: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_21-10-51_082_6975609978266062150/-ext-10001
+ tmp directory: pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_16-37-43_403_2940585090717921602/-ext-10001
Stage: Stage-4
Stats-Aggr Operator
- Stats Aggregation Key Prefix: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_21-10-51_082_6975609978266062150/-ext-10000/
+ Stats Aggregation Key Prefix: pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_16-37-43_403_2940585090717921602/-ext-10000/
PREHOOK: query: insert overwrite table dst_union22 partition (ds='2')
@@ -518,11 +518,11 @@
PREHOOK: query: select * from dst_union22 where ds = '2' order by k1, k2, k3, k4
PREHOOK: type: QUERY
PREHOOK: Input: default@dst_union22@ds=2
-PREHOOK: Output: file:/tmp/tomasz/hive_2011-06-01_21-11-00_529_7757406641359320404/-mr-10000
+PREHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_16-37-52_911_2656054527092967946/-mr-10000
POSTHOOK: query: select * from dst_union22 where ds = '2' order by k1, k2, k3, k4
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dst_union22@ds=2
-POSTHOOK: Output: file:/tmp/tomasz/hive_2011-06-01_21-11-00_529_7757406641359320404/-mr-10000
+POSTHOOK: Output: file:/tmp/charleschen/hive_2011-08-04_16-37-52_911_2656054527092967946/-mr-10000
POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k3 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/ppd_gby_join.q.out
===================================================================
--- ql/src/test/results/clientpositive/ppd_gby_join.q.out (revision 1158048)
+++ ql/src/test/results/clientpositive/ppd_gby_join.q.out (working copy)
@@ -35,7 +35,7 @@
alias: src
Filter Operator
predicate:
- expr: (((key > '1') and (key < '400')) and ((key > '20') and ((value < 'val_50') or (key > '2'))))
+ expr: ((((key > '1') and (key < '400')) and ((key > '20') and ((value < 'val_50') or (key > '2')))) and (key <> '4'))
type: boolean
Filter Operator
predicate:
@@ -131,7 +131,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-14-53_815_8679319347089159838/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_15-37-21_382_2763297735817910570/-mr-10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -210,7 +210,7 @@
alias: src
Filter Operator
predicate:
- expr: (((key > '1') and (key < '400')) and ((key > '20') and ((value < 'val_50') or (key > '2'))))
+ expr: ((((key > '1') and (key < '400')) and ((key > '20') and ((value < 'val_50') or (key > '2')))) and (key <> '4'))
type: boolean
Select Operator
expressions:
@@ -294,7 +294,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-14-54_718_7102065647101283820/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_15-37-21_485_1637535815955409976/-mr-10002
Reduce Output Operator
key expressions:
expr: _col0
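
The extra conjunct (key <> '4') appearing in the filters above is the visible effect of this change: a predicate written against one side of an equijoin is replicated onto the other side because the join keys are equal. The rewrite is governed by the new hive.ppd.recognizetransivity flag, which defaults to true. A minimal sketch, not part of the patch, of reading and toggling the flag programmatically; the class name is illustrative:

    import org.apache.hadoop.hive.conf.HiveConf;

    public class TransitivityFlagDemo {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // The new flag defaults to true, so transitive replication is on.
        boolean enabled = HiveConf.getBoolVar(conf,
            HiveConf.ConfVars.HIVEPPDRECOGNIZETRANSITIVITY);
        System.out.println("transitive predicate pushdown: " + enabled);
        // Turn it off, e.g. to compare EXPLAIN plans with and without the rewrite.
        conf.setBoolVar(HiveConf.ConfVars.HIVEPPDRECOGNIZETRANSITIVITY, false);
      }
    }

From the CLI the equivalent toggle is: set hive.ppd.recognizetransivity=false;
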
Index: ql/src/test/results/compiler/plan/join2.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join2.q.xml (revision 1158048)
+++ ql/src/test/results/compiler/plan/join2.q.xml (working copy)
@@ -1,5 +1,5 @@
-
+
@@ -129,7 +129,7 @@
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-56-11_853_6440618519033803488/-ext-10001
+ pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_17-13-38_202_8886647895598090207/-ext-10001
@@ -207,11 +207,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060169
+ 1312503216
@@ -269,11 +269,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060169
+ 1312503216
@@ -314,6 +314,20 @@
+
+ VALUE._col0
+
+
+ _col4
+
+
+ src2
+
+
+
+
+
+
@@ -598,6 +612,20 @@
+
+ VALUE._col0
+
+
+ key
+
+
+ src3
+
+
+
+
+
+
@@ -918,7 +946,7 @@
- file:/tmp/sdong/hive_2011-04-28_23-56-11_853_6440618519033803488/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_17-13-38_202_8886647895598090207/-mr-10002
$INTNAME
@@ -926,7 +954,7 @@
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
src3
@@ -938,7 +966,7 @@
- file:/tmp/sdong/hive_2011-04-28_23-56-11_853_6440618519033803488/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_17-13-38_202_8886647895598090207/-mr-10002
-mr-10002
@@ -987,7 +1015,7 @@
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
src
@@ -1044,11 +1072,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060169
+ 1312503216
@@ -1106,11 +1134,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060169
+ 1312503216
@@ -1139,7 +1167,7 @@
1
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-56-11_853_6440618519033803488/-ext-10000
+ pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_17-13-38_202_8886647895598090207/-ext-10000
true
@@ -1148,7 +1176,7 @@
1
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-56-11_853_6440618519033803488/-ext-10000/
+ pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_17-13-38_202_8886647895598090207/-ext-10000/
@@ -1661,11 +1689,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060169
+ 1312503216
@@ -1723,11 +1751,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060169
+ 1312503216
@@ -1793,11 +1821,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060169
+ 1312503216
@@ -1855,11 +1883,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060169
+ 1312503216
@@ -2477,7 +2505,7 @@
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
src2
@@ -2492,7 +2520,7 @@
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
src
@@ -2549,11 +2577,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060169
+ 1312503216
@@ -2611,11 +2639,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060169
+ 1312503216
@@ -2637,7 +2665,7 @@
- file:/tmp/sdong/hive_2011-04-28_23-56-11_853_6440618519033803488/-mr-10002
+ file:/tmp/charleschen/hive_2011-08-04_17-13-38_202_8886647895598090207/-mr-10002
1
Index: ql/src/test/results/compiler/plan/join3.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join3.q.xml (revision 1158048)
+++ ql/src/test/results/compiler/plan/join3.q.xml (working copy)
@@ -1,5 +1,5 @@
-
+
@@ -22,7 +22,7 @@
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-56-22_007_8482826279762938755/-ext-10000/
+ pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_17-13-45_047_3201271396902323979/-ext-10000/
@@ -58,7 +58,7 @@
true
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-56-22_007_8482826279762938755/-ext-10000
+ pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_17-13-45_047_3201271396902323979/-ext-10000
@@ -111,11 +111,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/dest1
transient_lastDdlTime
- 1304060181
+ 1312503224
@@ -125,7 +125,7 @@
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-56-22_007_8482826279762938755/-ext-10001
+ pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_17-13-45_047_3201271396902323979/-ext-10001
@@ -196,11 +196,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060179
+ 1312503223
@@ -258,11 +258,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060179
+ 1312503223
@@ -328,11 +328,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060179
+ 1312503223
@@ -390,11 +390,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060179
+ 1312503223
@@ -460,11 +460,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060179
+ 1312503223
@@ -522,11 +522,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060179
+ 1312503223
@@ -548,7 +548,26 @@
-
+
+
+ VALUE._col0
+
+
+ key
+
+
+ src2
+
+
+
+
+ string
+
+
+
+
+
+
@@ -566,11 +585,7 @@
src2
-
-
- string
-
-
+
@@ -829,6 +844,20 @@
+
+ VALUE._col0
+
+
+ key
+
+
+ src3
+
+
+
+
+
+
@@ -1416,7 +1445,7 @@
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
src2
@@ -1434,7 +1463,7 @@
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
src
@@ -1491,11 +1520,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060179
+ 1312503223
@@ -1553,11 +1582,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060179
+ 1312503223
@@ -1586,7 +1615,7 @@
1
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-56-22_007_8482826279762938755/-ext-10000
+ pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_17-13-45_047_3201271396902323979/-ext-10000
true
@@ -1595,7 +1624,7 @@
1
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-56-22_007_8482826279762938755/-ext-10000/
+ pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_17-13-45_047_3201271396902323979/-ext-10000/
Index: ql/src/test/results/compiler/plan/join1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join1.q.xml (revision 1158048)
+++ ql/src/test/results/compiler/plan/join1.q.xml (working copy)
@@ -1,5 +1,5 @@
-
+
@@ -22,7 +22,7 @@
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-56-01_985_2735813683400211590/-ext-10000/
+ pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_17-13-31_618_4947074952405203802/-ext-10000/
@@ -58,7 +58,7 @@
true
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-56-01_985_2735813683400211590/-ext-10000
+ pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_17-13-31_618_4947074952405203802/-ext-10000
@@ -111,11 +111,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/dest1
transient_lastDdlTime
- 1304060161
+ 1312503211
@@ -125,7 +125,7 @@
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-56-01_985_2735813683400211590/-ext-10001
+ pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_17-13-31_618_4947074952405203802/-ext-10001
@@ -196,11 +196,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060159
+ 1312503209
@@ -258,11 +258,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060159
+ 1312503209
@@ -328,11 +328,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060159
+ 1312503209
@@ -390,11 +390,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060159
+ 1312503209
@@ -435,6 +435,20 @@
+
+ VALUE._col0
+
+
+ key
+
+
+ src2
+
+
+
+
+
+
@@ -1026,7 +1040,7 @@
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
src2
@@ -1041,7 +1055,7 @@
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
src
@@ -1098,11 +1112,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060159
+ 1312503209
@@ -1160,11 +1174,11 @@
location
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
+ pfile:/data/users/charleschen/hive-trunk/build/ql/test/data/warehouse/src
transient_lastDdlTime
- 1304060159
+ 1312503209
@@ -1193,7 +1207,7 @@
1
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-56-01_985_2735813683400211590/-ext-10000
+ pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_17-13-31_618_4947074952405203802/-ext-10000
true
@@ -1202,7 +1216,7 @@
1
- pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-56-01_985_2735813683400211590/-ext-10000/
+ pfile:/data/users/charleschen/hive-trunk/build/ql/scratchdir/hive_2011-08-04_17-13-31_618_4947074952405203802/-ext-10000/
Index: ql/src/test/queries/clientpositive/ppd_transitivity.q
===================================================================
--- ql/src/test/queries/clientpositive/ppd_transitivity.q (revision 0)
+++ ql/src/test/queries/clientpositive/ppd_transitivity.q (revision 0)
@@ -0,0 +1,9 @@
+drop table invites;
+drop table invites2;
+create table invites (foo int, bar string) partitioned by (ds string);
+create table invites2 (foo int, bar string) partitioned by (ds string);
+explain select count(*) from invites join invites2 on invites.ds=invites2.ds where invites.ds='2011-01-01';
+explain select * from invites join invites2 on invites.ds=invites2.ds where invites.ds='2011-01-01';
+explain select * from (select a.foo, b.bar from invites a join invites2 b on a.foo=b.foo) c join invites d on c.foo=d.bar where d.bar='10';
+drop table invites;
+drop table invites2;
\ No newline at end of file
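
The EXPLAIN statements in this new test exercise the rewrite implemented in OpProcFactory below: for a pushable predicate that references only one side of an equijoin key, a copy is made with the column reference re-pointed at the other side. A minimal sketch of that clone-and-rename step against the same plan API, with illustrative method names (the patch's own versions are applyFilterTransitivity and replaceColumnReference):

    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

    public class TransitiveRewriteSketch {
      // Re-point every reference to one internal column name at another.
      static void renameColumn(ExprNodeDesc expr, String from, String to) {
        if (expr instanceof ExprNodeColumnDesc
            && ((ExprNodeColumnDesc) expr).getColumn().equals(from)) {
          ((ExprNodeColumnDesc) expr).setColumn(to);
        }
        if (expr.getChildren() != null) {
          for (ExprNodeDesc child : expr.getChildren()) {
            renameColumn(child, from, to);
          }
        }
      }

      // Clone a predicate on the left join key and rewrite it against the
      // right join key, leaving the original filter untouched.
      static ExprNodeDesc replicateOverJoinKey(ExprNodeDesc pred,
          String leftKey, String rightKey) {
        ExprNodeDesc copy = pred.clone();
        renameColumn(copy, leftKey, rightKey);
        return copy;
      }
    }
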
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (revision 1158048)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (working copy)
@@ -508,6 +508,8 @@
String[] nm = parResover.reverseLookup(outputCol);
ColumnInfo colInfo = oldRR.get(nm[0], nm[1]);
if (colInfo != null) {
+ String internalName=colInfo.getInternalName();
+ newMap.put(internalName, oldMap.get(internalName));
newRR.put(nm[0], nm[1], colInfo);
}
}
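
The two lines added to ColumnPrunerProcFactory keep the internal-name-to-expression mapping for every column that survives pruning, presumably so that the extra filter expressions introduced by the transitive pushdown still resolve against the pruned operator's column map. A minimal, self-contained sketch of that copy step; the names oldMap, newMap and retained are illustrative:

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class RetainColumnMappings {
      // Copy only the entries for columns that are still present after pruning.
      static <V> Map<String, V> retain(Map<String, V> oldMap, List<String> retained) {
        Map<String, V> newMap = new HashMap<String, V>();
        for (String internalName : retained) {
          newMap.put(internalName, oldMap.get(internalName));
        }
        return newMap;
      }
    }
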
Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (revision 1158048)
+++ ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (working copy)
@@ -19,17 +19,19 @@
import java.io.Serializable;
import java.util.ArrayList;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.Set;
import java.util.Stack;
-import java.util.Map.Entry;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.JoinOperator;
@@ -43,11 +45,13 @@
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler;
-import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
@@ -222,8 +226,30 @@
OpWalkerInfo owi = (OpWalkerInfo) procCtx;
Set<String> aliases = getQualifiedAliases((JoinOperator) nd, owi
.getRowResolver(nd));
- boolean hasUnpushedPredicates = mergeWithChildrenPred(nd, owi, null, aliases, false);
+ // we pass null for aliases here because mergeWithChildrenPred filters
+ // aliases in the children node context and we need to filter them in
+ // the current JoinOperator's context
+ boolean hasUnpushedPredicates = mergeWithChildrenPred(nd, owi, null, null, false);
+ ExprWalkerInfo prunePreds =
+ owi.getPrunedPreds((Operator<? extends Serializable>) nd);
+ if (prunePreds != null) {
+ // collect the keys first and remove afterwards to avoid a
+ // ConcurrentModificationException on the candidates map
+ Set<String> nonPushable = new HashSet<String>();
+ for (String key : prunePreds.getFinalCandidates().keySet()) {
+ if (!aliases.contains(key)) {
+ nonPushable.add(key);
+ }
+ }
+ for (String key : nonPushable) {
+ prunePreds.getFinalCandidates().remove(key);
+ }
+ }
+
+ // If we have a query like select * from invites join invites2 on
+ // invites.ds=invites2.ds where invites.ds='2011-01-01', then we want to
+ // recognize transitivity and push the filter invites2.ds='2011-01-01'
+ // down to invites2 as well.
if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
+ HiveConf.ConfVars.HIVEPPDRECOGNIZETRANSITIVITY)) {
+ applyFilterTransitivity((JoinOperator) nd, owi);
+ }
+
+ if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
if (hasUnpushedPredicates) {
aliases = null;
@@ -234,6 +260,111 @@
return null;
}
+ private void applyFilterTransitivity(JoinOperator nd, OpWalkerInfo owi)
+ throws SemanticException {
+ ExprWalkerInfo prunePreds =
+ owi.getPrunedPreds((Operator<? extends Serializable>) nd);
+ if (prunePreds != null) {
+ // We want to use the row resolvers of the parents of the join op
+ // because the rowresolver refers to the input columns and the
+ // filters at this point refer to the output columns of the join
+ // operator
+ Map<String, RowResolver> aliasToRR = new HashMap<String, RowResolver>();
+ for (Operator<? extends Serializable> o : (nd).getParentOperators()) {
+ for (String alias : owi.getRowResolver(o).getTableNames()){
+ aliasToRR.put(alias, owi.getRowResolver(o));
+ }
+ }
+
+ ArrayList<ArrayList<ASTNode>> eqExpressions = owi.getParseContext().getJoinContext()
+ .get(nd).getExpressions();
+ int numColumns = eqExpressions.size();
+ int numEqualities = eqExpressions.get(0).size();
+
+ JoinCondDesc[] joins = (nd).getConf().getConds();
+
+ Map<String, List<ExprNodeDesc>> oldFilters =
+ prunePreds.getFinalCandidates();
+ Map<String, List<ExprNodeDesc>> newFilters =
+ new HashMap<String, List<ExprNodeDesc>>();
+
+ // For each join-key equality, copy every pushable filter that references
+ // only the column on one side onto the corresponding column on the other
+ // side, so it can be pushed down to that table as well.
+ for (int i = 0; i < numEqualities; i++) {
+ for (int j = 0; j < numColumns; j++) {
+ for (int k = 0; k < numColumns; k++) {
+ ColumnInfo left = getColumnInfoFromAST(
+ eqExpressions.get(j).get(i), aliasToRR);
+ ColumnInfo right = getColumnInfoFromAST(
+ eqExpressions.get(k).get(i), aliasToRR);
+ if (j != k && left != null && right != null
+ && oldFilters.get(left.getTabAlias()) != null) {
+ for (ExprNodeDesc expr : oldFilters.get(left.getTabAlias())) {
+ Set<String> colsreferenced =
+ new HashSet<String>(expr.getCols());
+ if (colsreferenced.size() == 1
+ && colsreferenced.contains(left.getInternalName())) {
+ ExprNodeDesc newexpr = expr.clone();
+ replaceColumnReference(newexpr, left.getInternalName(),
+ right.getInternalName());
+ if (newFilters.get(right.getTabAlias()) == null) {
+ newFilters.put(right.getTabAlias(),
+ new ArrayList<ExprNodeDesc>());
+ }
+ newFilters.get(right.getTabAlias()).add(newexpr);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ for (Entry<String, List<ExprNodeDesc>> aliasToFilters
+ : newFilters.entrySet()) {
+ owi.getPrunedPreds((Operator<? extends Serializable>) nd)
+ .addPushDowns(aliasToFilters.getKey(), aliasToFilters.getValue());
+ }
+ }
+ }
+
+ private ColumnInfo getColumnInfoFromAST(ASTNode nd,
+ Map<String, RowResolver> aliastoRR) throws SemanticException {
+ if (nd.getType()==HiveParser.DOT) {
+ if (nd.getChildCount()==2) {
+ if (nd.getChild(0).getType()==HiveParser.TOK_TABLE_OR_COL
+ && nd.getChild(0).getChildCount()==1
+ && nd.getChild(1).getType()==HiveParser.Identifier){
+ String alias = nd.getChild(0).getChild(0).getText().toLowerCase();
+ String column = nd.getChild(1).getText().toLowerCase();
+ RowResolver rr=aliastoRR.get(alias);
+ if (rr == null) {
+ return null;
+ }
+ return rr.get(alias, column);
+ }
+ }
+ }
+ return null;
+ }
+
+ private void replaceColumnReference(ExprNodeDesc expr,
+ String oldcolumn,String newcolumn) {
+ if (expr instanceof ExprNodeColumnDesc) {
+ if (((ExprNodeColumnDesc) expr).getColumn().equals(oldcolumn)){
+ ((ExprNodeColumnDesc) expr).setColumn(newcolumn);
+ }
+ }
+
+ if (expr.getChildren() != null){
+ for (ExprNodeDesc childexpr : expr.getChildren()) {
+ replaceColumnReference(childexpr, oldcolumn, newcolumn);
+ }
+ }
+ }
+
/**
* Figures out the aliases for whom it is safe to push predicates based on
* ANSI SQL semantics For inner join, all predicates for all aliases can be