Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
===================================================================
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java	(revision 1038341)
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java	(working copy)
@@ -256,8 +256,8 @@
     HIVEMAXMAPJOINSIZE("hive.mapjoin.maxsize", 100000),
     HIVEHASHTABLETHRESHOLD("hive.hashtable.initialCapacity", 100000),
     HIVEHASHTABLELOADFACTOR("hive.hashtable.loadfactor", (float) 0.75),
-    HIVEHASHTABLEMAXMEMORYUSAGE("hive.hashtable.max.memory.usage", (float) 0.90),
-    HIVEHASHTABLESCALE("hive.hashtable.scale", (long)100000),
+    HIVEHASHTABLEMAXMEMORYUSAGE("hive.mapjoin.localtask.max.memory.usage", (float) 0.90),
+    HIVEHASHTABLESCALE("hive.mapjoin.check.memory.rows", (long)100000),
     HIVEDEBUGLOCALTASK("hive.debug.localtask",false),
Index: ql/src/test/results/clientpositive/auto_join25.q.out
===================================================================
--- ql/src/test/results/clientpositive/auto_join25.q.out	(revision 1038341)
+++ ql/src/test/results/clientpositive/auto_join25.q.out	(working copy)
@@ -13,6 +13,8 @@
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 PREHOOK: Output: default@dest1
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
+ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
 POSTHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
 INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value where (src1.ds = '2008-04-08' or src1.ds = '2008-04-09' )and (src1.hr = '12' or src1.hr = '11')
@@ -28,11 +30,11 @@
 PREHOOK: query: SELECT sum(hash(dest1.key,dest1.value)) FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-12_13-09-55_396_5931103673819276275/-mr-10000
+PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-12_062_2731833788874193660/-mr-10000
 POSTHOOK: query: SELECT sum(hash(dest1.key,dest1.value)) FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-12_13-09-55_396_5931103673819276275/-mr-10000
+POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-12_062_2731833788874193660/-mr-10000
 POSTHOOK: Lineage: dest1.key EXPRESSION [(srcpart)src1.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
 407444119660
@@ -48,6 +50,10 @@
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest_j2
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
+ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
+ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
 POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key)
 INSERT OVERWRITE TABLE dest_j2 SELECT src1.key, src3.value
 POSTHOOK: type: QUERY
@@ -60,11 +66,11 @@
 PREHOOK: query: SELECT sum(hash(dest_j2.key,dest_j2.value)) FROM dest_j2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest_j2
-PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-12_13-10-05_166_1509512648391049274/-mr-10000
+PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-36_524_5308749215651001089/-mr-10000
 POSTHOOK: query: SELECT sum(hash(dest_j2.key,dest_j2.value)) FROM dest_j2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest_j2
-POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-12_13-10-05_166_1509512648391049274/-mr-10000
+POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-36_524_5308749215651001089/-mr-10000
 POSTHOOK: Lineage: dest1.key EXPRESSION [(srcpart)src1.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: dest_j2.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ]
@@ -84,6 +90,8 @@
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest_j1
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask
+ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask
 POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key)
 INSERT OVERWRITE TABLE dest_j1 SELECT src1.key, src2.value
 POSTHOOK: type: QUERY
@@ -98,11 +106,11 @@
 PREHOOK: query: SELECT sum(hash(dest_j1.key,dest_j1.value)) FROM dest_j1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-12_13-10-12_389_3216138172725864168/-mr-10000
+PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-49_448_5881123257419888652/-mr-10000
 POSTHOOK: query: SELECT sum(hash(dest_j1.key,dest_j1.value)) FROM dest_j1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-12_13-10-12_389_3216138172725864168/-mr-10000
+POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-23_13-09-49_448_5881123257419888652/-mr-10000
 POSTHOOK: Lineage: dest1.key EXPRESSION [(srcpart)src1.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ]
Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java	(revision 1038341)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java	(working copy)
@@ -827,10 +827,10 @@
       if (backupTask != null) {
         errorMessage = "FAILED: Execution Error, return code " + exitVal + " from "
             + tsk.getClass().getName();
-        console.printInfo(errorMessage);
+        console.printError(errorMessage);
         errorMessage = "ATTEMPT: Execute BackupTask: " + backupTask.getClass().getName();
-        console.printInfo(errorMessage);
+        console.printError(errorMessage);

         // add backup task to runnable
         if (DriverContext.isLaunchable(backupTask)) {
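
Note on the renamed settings in the HiveConf.java hunk: hive.mapjoin.check.memory.rows (HIVEHASHTABLESCALE) is the row interval at which the map-join local task re-checks its heap usage, and hive.mapjoin.localtask.max.memory.usage (HIVEHASHTABLEMAXMEMORYUSAGE) is the usage fraction beyond which the local task gives up, letting the Driver fall back to the backup MapRedTask; that fallback is what the new FAILED/ATTEMPT lines in auto_join25.q.out record. The sketch below is not part of the patch; it only illustrates how a consumer of these two ConfVars might apply such a check, assuming HiveConf's getLongVar/getFloatVar accessors and a simple Runtime-based usage estimate. The class and method names are hypothetical.

// Illustrative sketch only, not the actual Hive local-task implementation.
import org.apache.hadoop.hive.conf.HiveConf;

public class LocalTaskMemoryCheckSketch {

  // Returns true when the local task should abort so the Driver can run the
  // backup MapRedTask instead (the FAILED/ATTEMPT sequence seen in the .q.out).
  public static boolean shouldAbort(HiveConf conf, long rowsProcessed) {
    // hive.mapjoin.check.memory.rows: only sample memory usage every N rows.
    long checkInterval = conf.getLongVar(HiveConf.ConfVars.HIVEHASHTABLESCALE);
    if (checkInterval <= 0 || rowsProcessed % checkInterval != 0) {
      return false;
    }
    // hive.mapjoin.localtask.max.memory.usage: fraction of the heap the local
    // task may consume before bailing out.
    float maxUsage = conf.getFloatVar(HiveConf.ConfVars.HIVEHASHTABLEMAXMEMORYUSAGE);
    Runtime rt = Runtime.getRuntime();
    double used = (double) (rt.totalMemory() - rt.freeMemory()) / rt.maxMemory();
    return used > maxUsage;
  }
}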