diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 7932a3d..bfe08a0 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -182,6 +182,7 @@
LOCALSCRATCHDIR("hive.exec.local.scratchdir", System.getProperty("java.io.tmpdir") + File.separator + System.getProperty("user.name")),
SCRATCHDIRPERMISSION("hive.scratch.dir.permission", "700"),
SUBMITVIACHILD("hive.exec.submitviachild", false),
+ SUBMITLOCALTASKVIACHILD("hive.exec.submit.local.task.via.child", true),
SCRIPTERRORLIMIT("hive.exec.script.maxerrsize", 100000),
ALLOWPARTIALCONSUMP("hive.exec.script.allow.partial.consumption", false),
STREAMREPORTERPERFIX("stream.stderr.reporter.prefix", "reporter:"),
diff --git data/conf/hive-site.xml data/conf/hive-site.xml
index 7931d6a..1c9c598 100644
--- data/conf/hive-site.xml
+++ data/conf/hive-site.xml
@@ -216,4 +216,9 @@
Using dummy config value above because you cannot override config with empty value
+<property>
+  <name>hive.exec.submit.local.task.via.child</name>
+  <value>false</value>
+</property>
+
diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 2974327..4643cf4 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -827,6 +827,7 @@ public String cliInit(String tname, boolean recreate) throws Exception {
HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
"org.apache.hadoop.hive.ql.security.DummyAuthenticator");
+ Utilities.clearWorkMap();
CliSessionState ss = new CliSessionState(conf);
assert ss != null;
ss.in = System.in;
diff --git pom.xml pom.xml
index a945f63..d2dfceb 100644
--- pom.xml
+++ pom.xml
@@ -717,7 +717,7 @@
true
false
false
- -Xmx1024m -XX:MaxPermSize=256m
+ -Xmx8192m -XX:MaxPermSize=512m
${test.tmp.dir}/conf
${basedir}/${hive.path.to.root}/conf
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
index 2ce4dbd..a9869f7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
@@ -127,8 +127,7 @@ public int execute(DriverContext driverContext) {
}
}
- runningViaChild = ShimLoader.getHadoopShims().isLocalMode(conf) ||
- conf.getBoolVar(HiveConf.ConfVars.SUBMITVIACHILD);
+ runningViaChild = conf.getBoolVar(HiveConf.ConfVars.SUBMITVIACHILD);
if(!runningViaChild) {
// we are not running this mapred task via child jvm
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
index d2e122d..93b0498 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
@@ -130,6 +130,13 @@ public boolean requireLock() {
@Override
public int execute(DriverContext driverContext) {
+
+ if (!conf.getBoolVar(HiveConf.ConfVars.SUBMITLOCALTASKVIACHILD)) {
+ // execute in process
+ return executeFromChildJVM(driverContext);
+ }
+
+ // execute in child jvm
try {
// generate the cmd line to run in the child jvm
Context ctx = driverContext.getCtx();
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
index 89bc1a7..54aa987 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
@@ -130,7 +130,6 @@ public void iterate(AggregationBuffer agg, Object[] parameters)
assert parameters.length == 0;
((CountAgg) agg).value++;
} else {
- assert parameters.length > 0;
boolean countThisRow = true;
for (Object nextParam : parameters) {
if (nextParam == null) {
diff --git ql/src/test/results/clientpositive/authorization_explain.q.out ql/src/test/results/clientpositive/authorization_explain.q.out
index 3aeb170..853370b 100644
--- ql/src/test/results/clientpositive/authorization_explain.q.out
+++ ql/src/test/results/clientpositive/authorization_explain.q.out
@@ -16,9 +16,6 @@ CURRENT_USER:
hive_test_user
OPERATION:
QUERY
-AUTHORIZATION_FAILURES:
- No privilege 'Select' found for inputs { database:default, table:srcpart, columnName:key}
- No privilege 'Select' found for inputs { database:default, table:src, columnName:key}
Warning: Shuffle Join JOIN[4][tables = [src, srcpart]] in Stage 'Stage-1:MAPRED' is a cross product
PREHOOK: query: explain formatted authorization select * from src join srcpart
PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/auto_join25.q.out ql/src/test/results/clientpositive/auto_join25.q.out
index 927f898..21188f8 100644
--- ql/src/test/results/clientpositive/auto_join25.q.out
+++ ql/src/test/results/clientpositive/auto_join25.q.out
@@ -19,16 +19,6 @@ PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
PREHOOK: Output: default@dest1
-Execution failed with exit status: 3
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-7
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
POSTHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
@@ -72,28 +62,8 @@ INSERT OVERWRITE TABLE dest_j2 SELECT src1.key, src3.value
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@dest_j2
-Execution failed with exit status: 3
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-14
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
-Execution failed with exit status: 3
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-12
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key)
@@ -134,16 +104,6 @@ INSERT OVERWRITE TABLE dest_j1 SELECT src1.key, src2.value
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@dest_j1
-Execution failed with exit status: 3
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-7
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key)
diff --git ql/src/test/results/clientpositive/auto_join_without_localtask.q.out ql/src/test/results/clientpositive/auto_join_without_localtask.q.out
index 01805ed..25d349f 100644
--- ql/src/test/results/clientpositive/auto_join_without_localtask.q.out
+++ ql/src/test/results/clientpositive/auto_join_without_localtask.q.out
@@ -823,27 +823,8 @@ select a.* from src a join src b on a.key=b.key join src c on a.value=c.value wh
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 3
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-14
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-5
-
-Logs:
-
#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java
index 077c371..c96fc2d 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java
@@ -112,9 +112,6 @@ protected StandardStructObjectInspector(List<String> structFieldNames,
protected void init(List<String> structFieldNames,
List<ObjectInspector> structFieldObjectInspectors,
List<String> structFieldComments) {
- assert (structFieldNames.size() == structFieldObjectInspectors.size());
- assert (structFieldComments == null ||
- (structFieldNames.size() == structFieldComments.size()));
fields = new ArrayList<MyField>(structFieldNames.size());
for (int i = 0; i < structFieldNames.size(); i++) {
@@ -182,7 +179,6 @@ public Object getStructFieldData(Object data, StructField fieldRef) {
LOG.warn("ignoring similar errors.");
}
int fieldID = f.getFieldID();
- assert (fieldID >= 0 && fieldID < fields.size());
if (fieldID >= listSize) {
return null;
@@ -205,7 +201,6 @@ public Object getStructFieldData(Object data, StructField fieldRef) {
data = java.util.Arrays.asList((Object[]) data);
}
List