diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 7932a3d..bfe08a0 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -182,6 +182,7 @@
LOCALSCRATCHDIR("hive.exec.local.scratchdir", System.getProperty("java.io.tmpdir") + File.separator + System.getProperty("user.name")),
SCRATCHDIRPERMISSION("hive.scratch.dir.permission", "700"),
SUBMITVIACHILD("hive.exec.submitviachild", false),
+ SUBMITLOCALTASKVIACHILD("hive.exec.submit.local.task.via.child", true),
SCRIPTERRORLIMIT("hive.exec.script.maxerrsize", 100000),
ALLOWPARTIALCONSUMP("hive.exec.script.allow.partial.consumption", false),
STREAMREPORTERPERFIX("stream.stderr.reporter.prefix", "reporter:"),
diff --git contrib/src/test/queries/clientnegative/case_with_row_sequence.q contrib/src/test/queries/clientnegative/case_with_row_sequence.q
index b51dc6e..910ffda 100644
--- contrib/src/test/queries/clientnegative/case_with_row_sequence.q
+++ contrib/src/test/queries/clientnegative/case_with_row_sequence.q
@@ -1,3 +1,6 @@
+set hive.exec.submitviachild=true;
+set hive.exec.submit.local.task.via.child=true;
+
drop temporary function row_sequence;
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
diff --git contrib/src/test/queries/clientpositive/dboutput.q contrib/src/test/queries/clientpositive/dboutput.q
index 28f1710..4c4f5d5 100644
--- contrib/src/test/queries/clientpositive/dboutput.q
+++ contrib/src/test/queries/clientpositive/dboutput.q
@@ -6,6 +6,8 @@ set mapred.map.tasks.speculative.execution=false;
set mapred.reduce.tasks.speculative.execution=false;
set mapred.map.tasks=1;
set mapred.reduce.tasks=1;
+set hive.exec.submitviachild=true;
+set hive.exec.submit.local.task.via.child=true;
ADD JAR ${system:maven.local.repository}/org/apache/derby/derby/${system:derby.version}/derby-${system:derby.version}.jar;
diff --git data/conf/hive-site.xml data/conf/hive-site.xml
index 7931d6a..1c9c598 100644
--- data/conf/hive-site.xml
+++ data/conf/hive-site.xml
@@ -216,4 +216,9 @@
Using dummy config value above because you cannot override config with empty value
+<property>
+  <name>hive.exec.submit.local.task.via.child</name>
+  <value>false</value>
+</property>
+
diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 2974327..4643cf4 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -827,6 +827,7 @@ public String cliInit(String tname, boolean recreate) throws Exception {
HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
"org.apache.hadoop.hive.ql.security.DummyAuthenticator");
+ Utilities.clearWorkMap();
CliSessionState ss = new CliSessionState(conf);
assert ss != null;
ss.in = System.in;
diff --git pom.xml pom.xml
index a945f63..1b5a049 100644
--- pom.xml
+++ pom.xml
@@ -717,7 +717,7 @@
true
false
false
- -Xmx1024m -XX:MaxPermSize=256m
+ -Xmx4096m -XX:MaxPermSize=512m
${test.tmp.dir}/conf
${basedir}/${hive.path.to.root}/conf
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
index 2ce4dbd..a9869f7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
@@ -127,8 +127,7 @@ public int execute(DriverContext driverContext) {
}
}
- runningViaChild = ShimLoader.getHadoopShims().isLocalMode(conf) ||
- conf.getBoolVar(HiveConf.ConfVars.SUBMITVIACHILD);
+ runningViaChild = conf.getBoolVar(HiveConf.ConfVars.SUBMITVIACHILD);
if(!runningViaChild) {
// we are not running this mapred task via child jvm
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
index d2e122d..93b0498 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
@@ -130,6 +130,13 @@ public boolean requireLock() {
@Override
public int execute(DriverContext driverContext) {
+
+ if (!conf.getBoolVar(HiveConf.ConfVars.SUBMITLOCALTASKVIACHILD)) {
+ // execute in process
+ return executeFromChildJVM(driverContext);
+ }
+
+ // execute in child jvm
try {
// generate the cmd line to run in the child jvm
Context ctx = driverContext.getCtx();
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
index 89bc1a7..54aa987 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
@@ -130,7 +130,6 @@ public void iterate(AggregationBuffer agg, Object[] parameters)
assert parameters.length == 0;
((CountAgg) agg).value++;
} else {
- assert parameters.length > 0;
boolean countThisRow = true;
for (Object nextParam : parameters) {
if (nextParam == null) {
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index ef978e3..63ecb8d 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -80,6 +80,9 @@
static {
try {
conf = new HiveConf(ExecDriver.class);
+ conf.setBoolVar(HiveConf.ConfVars.SUBMITVIACHILD, true);
+ conf.setBoolVar(HiveConf.ConfVars.SUBMITLOCALTASKVIACHILD, true);
+
SessionState.start(conf);
//convert possible incompatible Windows path in config
diff --git ql/src/test/queries/clientpositive/archive_excludeHadoop20.q ql/src/test/queries/clientpositive/archive_excludeHadoop20.q
index 50c0faa..90757f2 100644
--- ql/src/test/queries/clientpositive/archive_excludeHadoop20.q
+++ ql/src/test/queries/clientpositive/archive_excludeHadoop20.q
@@ -1,5 +1,7 @@
set hive.archive.enabled = true;
set hive.enforce.bucketing = true;
+set hive.exec.submitviachild=true;
+set hive.exec.submit.local.task.via.child=true;
-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
diff --git ql/src/test/queries/clientpositive/dynpart_sort_opt_vectorization.q ql/src/test/queries/clientpositive/dynpart_sort_opt_vectorization.q
index 5f1a5ce..814f100 100644
--- ql/src/test/queries/clientpositive/dynpart_sort_opt_vectorization.q
+++ ql/src/test/queries/clientpositive/dynpart_sort_opt_vectorization.q
@@ -6,6 +6,8 @@ set hive.exec.dynamic.partition.mode=nonstrict;
set hive.vectorized.execution.enabled=true;
set hive.enforce.bucketing=false;
set hive.enforce.sorting=false;
+set hive.exec.submitviachild=true;
+set hive.exec.submit.local.task.via.child=true;
create table over1k(
t tinyint,
diff --git ql/src/test/queries/clientpositive/dynpart_sort_optimization.q ql/src/test/queries/clientpositive/dynpart_sort_optimization.q
index 52b5d1e..8c3c68f 100644
--- ql/src/test/queries/clientpositive/dynpart_sort_optimization.q
+++ ql/src/test/queries/clientpositive/dynpart_sort_optimization.q
@@ -5,6 +5,8 @@ set hive.exec.max.dynamic.partitions.pernode=1000;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.enforce.bucketing=false;
set hive.enforce.sorting=false;
+set hive.exec.submitviachild=true;
+set hive.exec.submit.local.task.via.child=true;
create table over1k(
t tinyint,
diff --git ql/src/test/queries/clientpositive/fetch_aggregation.q ql/src/test/queries/clientpositive/fetch_aggregation.q
index 618fea1..a56b6c8 100644
--- ql/src/test/queries/clientpositive/fetch_aggregation.q
+++ ql/src/test/queries/clientpositive/fetch_aggregation.q
@@ -1,4 +1,6 @@
set hive.fetch.task.aggr=true;
+set hive.exec.submitviachild=true;
+set hive.exec.submit.local.task.via.child=true;
explain
select count(key),sum(key),avg(key),min(key),max(key),std(key),variance(key) from src;
diff --git ql/src/test/queries/clientpositive/nonmr_fetch.q ql/src/test/queries/clientpositive/nonmr_fetch.q
index e961e93..2a92d17 100644
--- ql/src/test/queries/clientpositive/nonmr_fetch.q
+++ ql/src/test/queries/clientpositive/nonmr_fetch.q
@@ -1,4 +1,6 @@
set hive.fetch.task.conversion=minimal;
+set hive.exec.submitviachild=true;
+set hive.exec.submit.local.task.via.child=true;
-- backward compatible (minimal)
explain select * from src limit 10;
diff --git ql/src/test/queries/clientpositive/orc_analyze.q ql/src/test/queries/clientpositive/orc_analyze.q
index 915f4f0..3621c7a 100644
--- ql/src/test/queries/clientpositive/orc_analyze.q
+++ ql/src/test/queries/clientpositive/orc_analyze.q
@@ -1,3 +1,6 @@
+set hive.exec.submitviachild=true;
+set hive.exec.submit.local.task.via.child=true;
+
CREATE TABLE orc_create_people_staging (
id int,
first_name string,
diff --git ql/src/test/queries/clientpositive/sample10.q ql/src/test/queries/clientpositive/sample10.q
index 1c6695c..d9fe744 100644
--- ql/src/test/queries/clientpositive/sample10.q
+++ ql/src/test/queries/clientpositive/sample10.q
@@ -1,4 +1,5 @@
-
+set hive.exec.submitviachild=true;
+set hive.exec.submit.local.task.via.child=true;
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.enforce.bucketing=true;
diff --git ql/src/test/queries/clientpositive/vectorized_parquet.q ql/src/test/queries/clientpositive/vectorized_parquet.q
index 5ce1cf0..4b14628 100644
--- ql/src/test/queries/clientpositive/vectorized_parquet.q
+++ ql/src/test/queries/clientpositive/vectorized_parquet.q
@@ -1,3 +1,6 @@
+set hive.exec.submitviachild=true;
+set hive.exec.submit.local.task.via.child=true;
+
create table if not exists alltypes_parquet (
cint int,
ctinyint tinyint,
diff --git ql/src/test/results/clientnegative/cachingprintstream.q.out ql/src/test/results/clientnegative/cachingprintstream.q.out
index d231136..0acb772 100644
--- ql/src/test/results/clientnegative/cachingprintstream.q.out
+++ ql/src/test/results/clientnegative/cachingprintstream.q.out
@@ -8,30 +8,10 @@ PREHOOK: query: FROM src SELECT TRANSFORM (key, value) USING 'FAKE_SCRIPT_SHOULD
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
Begin cached logs.
PREHOOK: query: FROM src SELECT TRANSFORM (key, value) USING 'FAKE_SCRIPT_SHOULD_NOT_EXIST' AS key, value
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
End cached logs.
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/compute_stats_long.q.out ql/src/test/results/clientnegative/compute_stats_long.q.out
index c1373be..3be6320 100644
--- ql/src/test/results/clientnegative/compute_stats_long.q.out
+++ ql/src/test/results/clientnegative/compute_stats_long.q.out
@@ -20,14 +20,4 @@ select compute_stats(a, 10000) from tab_int
PREHOOK: type: QUERY
PREHOOK: Input: default@tab_int
#### A masked pattern was here ####
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/dyn_part3.q.out ql/src/test/results/clientnegative/dyn_part3.q.out
index 4de5005..2897d80 100644
--- ql/src/test/results/clientnegative/dyn_part3.q.out
+++ ql/src/test/results/clientnegative/dyn_part3.q.out
@@ -9,14 +9,6 @@ PREHOOK: query: insert overwrite table nzhang_part partition(value) select key,
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@nzhang_part
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
+[Fatal Error] total number of created files now is 207, which exceeds 100. Killing the job.
#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/dyn_part_max_per_node.q.out ql/src/test/results/clientnegative/dyn_part_max_per_node.q.out
index c7ed8ca..4a46354 100644
--- ql/src/test/results/clientnegative/dyn_part_max_per_node.q.out
+++ ql/src/test/results/clientnegative/dyn_part_max_per_node.q.out
@@ -20,14 +20,5 @@ LIMIT 50
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@max_parts
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-2
-
-Logs:
-
#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/index_compact_entry_limit.q.out ql/src/test/results/clientnegative/index_compact_entry_limit.q.out
index 2b4dfac..85614ca 100644
--- ql/src/test/results/clientnegative/index_compact_entry_limit.q.out
+++ ql/src/test/results/clientnegative/index_compact_entry_limit.q.out
@@ -29,14 +29,5 @@ PREHOOK: query: SELECT key, value FROM src WHERE key=100 ORDER BY key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 1
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
+Job Submission failed with exception 'java.io.IOException(org.apache.hadoop.hive.ql.metadata.HiveException: Number of compact index entries loaded during the query exceeded the maximum of 5 set in hive.index.compact.query.max.entries)'
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/index_compact_size_limit.q.out ql/src/test/results/clientnegative/index_compact_size_limit.q.out
index 2b4dfac..7c6bb0a 100644
--- ql/src/test/results/clientnegative/index_compact_size_limit.q.out
+++ ql/src/test/results/clientnegative/index_compact_size_limit.q.out
@@ -29,14 +29,5 @@ PREHOOK: query: SELECT key, value FROM src WHERE key=100 ORDER BY key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 1
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
+Job Submission failed with exception 'java.io.IOException(Size of data to read during a compact-index-based query exceeded the maximum of 1024 set in hive.index.compact.query.max.size)'
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/local_mapred_error_cache.q.out ql/src/test/results/clientnegative/local_mapred_error_cache.q.out
index 60df1cd..f5cf1ea 100644
--- ql/src/test/results/clientnegative/local_mapred_error_cache.q.out
+++ ql/src/test/results/clientnegative/local_mapred_error_cache.q.out
@@ -2,22 +2,4 @@ PREHOOK: query: FROM src SELECT TRANSFORM(key, value) USING 'python ../../data/s
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
-ID: Stage-1
-org.apache.hadoop.hive.ql.metadata.HiveException: [Error 20003]: An error occurred when trying to close the Operator running your custom script.
-#### A masked pattern was here ####
-org.apache.hadoop.hive.ql.metadata.HiveException: [Error 20003]: An error occurred when trying to close the Operator running your custom script.
-#### A masked pattern was here ####
-org.apache.hadoop.hive.ql.metadata.HiveException: [Error 20003]: An error occurred when trying to close the Operator running your custom script.
-#### A masked pattern was here ####
-Error during job, obtaining debugging information...
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/script_broken_pipe2.q.out ql/src/test/results/clientnegative/script_broken_pipe2.q.out
index e29e115..7e186a0 100644
--- ql/src/test/results/clientnegative/script_broken_pipe2.q.out
+++ ql/src/test/results/clientnegative/script_broken_pipe2.q.out
@@ -3,14 +3,4 @@ SELECT TRANSFORM(key, value, key, value, key, value, key, value, key, value, key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/script_broken_pipe3.q.out ql/src/test/results/clientnegative/script_broken_pipe3.q.out
index 1bf4fb2..575b8f2 100644
--- ql/src/test/results/clientnegative/script_broken_pipe3.q.out
+++ ql/src/test/results/clientnegative/script_broken_pipe3.q.out
@@ -3,14 +3,4 @@ SELECT TRANSFORM(*) USING 'false' AS a, b FROM (SELECT TRANSFORM(*) USING 'echo'
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/script_error.q.out ql/src/test/results/clientnegative/script_error.q.out
index c932d41..47acc55 100644
--- ql/src/test/results/clientnegative/script_error.q.out
+++ ql/src/test/results/clientnegative/script_error.q.out
@@ -47,14 +47,4 @@ FROM src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/serde_regex2.q.out ql/src/test/results/clientnegative/serde_regex2.q.out
index c68f075..8ed0bee 100644
--- ql/src/test/results/clientnegative/serde_regex2.q.out
+++ ql/src/test/results/clientnegative/serde_regex2.q.out
@@ -60,14 +60,4 @@ SELECT * FROM serde_regex ORDER BY time
PREHOOK: type: QUERY
PREHOOK: Input: default@serde_regex
#### A masked pattern was here ####
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/stats_aggregator_error_2.q.out ql/src/test/results/clientnegative/stats_aggregator_error_2.q.out
index 5727cfe..d9c5c06 100644
--- ql/src/test/results/clientnegative/stats_aggregator_error_2.q.out
+++ ql/src/test/results/clientnegative/stats_aggregator_error_2.q.out
@@ -30,14 +30,5 @@ PREHOOK: query: INSERT OVERWRITE TABLE tmptable select * from src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@tmptable
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/stats_publisher_error_1.q.out ql/src/test/results/clientnegative/stats_publisher_error_1.q.out
index f0bbc2f..ae89860 100644
--- ql/src/test/results/clientnegative/stats_publisher_error_1.q.out
+++ ql/src/test/results/clientnegative/stats_publisher_error_1.q.out
@@ -31,14 +31,5 @@ PREHOOK: query: INSERT OVERWRITE TABLE tmptable select * from src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@tmptable
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/stats_publisher_error_2.q.out ql/src/test/results/clientnegative/stats_publisher_error_2.q.out
index 2ff15eb..5cec747 100644
--- ql/src/test/results/clientnegative/stats_publisher_error_2.q.out
+++ ql/src/test/results/clientnegative/stats_publisher_error_2.q.out
@@ -30,14 +30,5 @@ PREHOOK: query: INSERT OVERWRITE TABLE tmptable select * from src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@tmptable
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/udf_assert_true.q.out ql/src/test/results/clientnegative/udf_assert_true.q.out
index 0e17231..6f18011 100644
--- ql/src/test/results/clientnegative/udf_assert_true.q.out
+++ ql/src/test/results/clientnegative/udf_assert_true.q.out
@@ -151,14 +151,4 @@ PREHOOK: query: SELECT ASSERT_TRUE(x < 2) FROM src LATERAL VIEW EXPLODE(ARRAY(1,
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/udf_assert_true2.q.out ql/src/test/results/clientnegative/udf_assert_true2.q.out
index 0506970..abc721e 100644
--- ql/src/test/results/clientnegative/udf_assert_true2.q.out
+++ ql/src/test/results/clientnegative/udf_assert_true2.q.out
@@ -69,14 +69,4 @@ PREHOOK: query: SELECT 1 + ASSERT_TRUE(x < 2) FROM src LATERAL VIEW EXPLODE(ARRA
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/udf_reflect_neg.q.out ql/src/test/results/clientnegative/udf_reflect_neg.q.out
index d65acad..7ae91b5 100644
--- ql/src/test/results/clientnegative/udf_reflect_neg.q.out
+++ ql/src/test/results/clientnegative/udf_reflect_neg.q.out
@@ -9,14 +9,4 @@ FROM src LIMIT 1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/udf_test_error.q.out ql/src/test/results/clientnegative/udf_test_error.q.out
index fae8c3c..3146652 100644
--- ql/src/test/results/clientnegative/udf_test_error.q.out
+++ ql/src/test/results/clientnegative/udf_test_error.q.out
@@ -8,14 +8,4 @@ PREHOOK: query: SELECT test_error(key < 125 OR key > 130) FROM src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientnegative/udf_test_error_reduce.q.out ql/src/test/results/clientnegative/udf_test_error_reduce.q.out
index 2c4642a..c83c503 100644
--- ql/src/test/results/clientnegative/udf_test_error_reduce.q.out
+++ ql/src/test/results/clientnegative/udf_test_error_reduce.q.out
@@ -13,14 +13,4 @@ FROM (
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientpositive/authorization_explain.q.out ql/src/test/results/clientpositive/authorization_explain.q.out
index 3aeb170..853370b 100644
--- ql/src/test/results/clientpositive/authorization_explain.q.out
+++ ql/src/test/results/clientpositive/authorization_explain.q.out
@@ -16,9 +16,6 @@ CURRENT_USER:
hive_test_user
OPERATION:
QUERY
-AUTHORIZATION_FAILURES:
- No privilege 'Select' found for inputs { database:default, table:srcpart, columnName:key}
- No privilege 'Select' found for inputs { database:default, table:src, columnName:key}
Warning: Shuffle Join JOIN[4][tables = [src, srcpart]] in Stage 'Stage-1:MAPRED' is a cross product
PREHOOK: query: explain formatted authorization select * from src join srcpart
PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/auto_join25.q.out ql/src/test/results/clientpositive/auto_join25.q.out
index 927f898..21188f8 100644
--- ql/src/test/results/clientpositive/auto_join25.q.out
+++ ql/src/test/results/clientpositive/auto_join25.q.out
@@ -19,16 +19,6 @@ PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
PREHOOK: Output: default@dest1
-Execution failed with exit status: 3
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-7
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
POSTHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
@@ -72,28 +62,8 @@ INSERT OVERWRITE TABLE dest_j2 SELECT src1.key, src3.value
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@dest_j2
-Execution failed with exit status: 3
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-14
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
-Execution failed with exit status: 3
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-12
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key)
@@ -134,16 +104,6 @@ INSERT OVERWRITE TABLE dest_j1 SELECT src1.key, src2.value
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@dest_j1
-Execution failed with exit status: 3
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-7
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key)
diff --git ql/src/test/results/clientpositive/auto_join_without_localtask.q.out ql/src/test/results/clientpositive/auto_join_without_localtask.q.out
index 01805ed..25d349f 100644
--- ql/src/test/results/clientpositive/auto_join_without_localtask.q.out
+++ ql/src/test/results/clientpositive/auto_join_without_localtask.q.out
@@ -823,27 +823,8 @@ select a.* from src a join src b on a.key=b.key join src c on a.value=c.value wh
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 3
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-14
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-5
-
-Logs:
-
#### A masked pattern was here ####
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git ql/src/test/results/clientpositive/infer_bucket_sort_convert_join.q.out ql/src/test/results/clientpositive/infer_bucket_sort_convert_join.q.out
index 01195d6..1508d0e 100644
--- ql/src/test/results/clientpositive/infer_bucket_sort_convert_join.q.out
+++ ql/src/test/results/clientpositive/infer_bucket_sort_convert_join.q.out
@@ -76,16 +76,6 @@ SELECT a.key, b.value FROM src a JOIN src b ON a.key = b.key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@test_table@part=1
-Execution failed with exit status: 3
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-7
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
POSTHOOK: query: -- This test tests the scenario when the mapper dies. So, create a conditional task for the mapjoin.
diff --git ql/src/test/results/clientpositive/mapjoin_hook.q.out ql/src/test/results/clientpositive/mapjoin_hook.q.out
index b7ab45e..815e4d5 100644
--- ql/src/test/results/clientpositive/mapjoin_hook.q.out
+++ ql/src/test/results/clientpositive/mapjoin_hook.q.out
@@ -37,16 +37,6 @@ PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
PREHOOK: Output: default@dest1
-Execution failed with exit status: 3
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-7
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
[MapJoinCounter PostHook] COMMON_JOIN: 0 HINTED_MAPJOIN: 0 HINTED_MAPJOIN_LOCAL: 0 CONVERTED_MAPJOIN: 0 CONVERTED_MAPJOIN_LOCAL: 1 BACKUP_COMMON_JOIN: 1
@@ -60,28 +50,8 @@ INSERT OVERWRITE TABLE dest1 SELECT src1.key, src3.value
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@dest1
-Execution failed with exit status: 3
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-14
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
-Execution failed with exit status: 3
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-12
-
-Logs:
-
-#### A masked pattern was here ####
FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
[MapJoinCounter PostHook] COMMON_JOIN: 0 HINTED_MAPJOIN: 0 HINTED_MAPJOIN_LOCAL: 0 CONVERTED_MAPJOIN: 0 CONVERTED_MAPJOIN_LOCAL: 2 BACKUP_COMMON_JOIN: 2
diff --git serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java
index 077c371..c96fc2d 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java
@@ -112,9 +112,6 @@ protected StandardStructObjectInspector(List structFieldNames,
protected void init(List structFieldNames,
List structFieldObjectInspectors,
List structFieldComments) {
- assert (structFieldNames.size() == structFieldObjectInspectors.size());
- assert (structFieldComments == null ||
- (structFieldNames.size() == structFieldComments.size()));
fields = new ArrayList(structFieldNames.size());
for (int i = 0; i < structFieldNames.size(); i++) {
@@ -182,7 +179,6 @@ public Object getStructFieldData(Object data, StructField fieldRef) {
LOG.warn("ignoring similar errors.");
}
int fieldID = f.getFieldID();
- assert (fieldID >= 0 && fieldID < fields.size());
if (fieldID >= listSize) {
return null;
@@ -205,7 +201,6 @@ public Object getStructFieldData(Object data, StructField fieldRef) {
data = java.util.Arrays.asList((Object[]) data);
}
List