diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 96a03f6..72c30c9 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -6,7 +6,7 @@ minimr.query.files=auto_sortmerge_join_16.q,\
   bucket_many.q,\
   bucket_num_reducers.q,\
   bucket_num_reducers2.q,\
-  beucketizedhiveinputformat.q,\
+  bucketizedhiveinputformat.q,\
   bucketmapjoin6.q,\
   bucketmapjoin7.q,\
   disable_merge_for_bucketing.q,\
@@ -701,9 +701,7 @@ minimr.query.negative.files=cluster_tasklog_retrieval.q,\
   file_with_header_footer_negative.q,\
   local_mapred_error_cache.q,\
   mapreduce_stack_trace.q,\
-  mapreduce_stack_trace_hadoop20.q,\
   mapreduce_stack_trace_turnoff.q,\
-  mapreduce_stack_trace_turnoff_hadoop20.q,\
   minimr_broken_pipe.q,\
   table_nonprintable_negative.q,\
   udf_local_resource.q
diff --git a/ql/src/test/queries/clientnegative/mapreduce_stack_trace_hadoop20.q b/ql/src/test/queries/clientnegative/mapreduce_stack_trace_hadoop20.q
deleted file mode 100644
index 9d0548c..0000000
--- a/ql/src/test/queries/clientnegative/mapreduce_stack_trace_hadoop20.q
+++ /dev/null
@@ -1,13 +0,0 @@
-set hive.exec.mode.local.auto=false;
-set hive.exec.job.debug.capture.stacktraces=true;
-set hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.VerifySessionStateStackTracesHook;
-
-FROM src SELECT TRANSFORM(key, value) USING 'script_does_not_exist' AS (key, value);
-
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- Hadoop 0.23 changes the getTaskDiagnostics behavior
--- The Error Code of hive failure MapReduce job changes
--- In Hadoop 0.20
--- Hive failure MapReduce job gets 20000 as Error Code
--- In Hadoop 0.23
--- Hive failure MapReduce job gets 2 as Error Code
diff --git a/ql/src/test/queries/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q b/ql/src/test/queries/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q
deleted file mode 100644
index e319944..0000000
--- a/ql/src/test/queries/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q
+++ /dev/null
@@ -1,13 +0,0 @@
-set hive.exec.mode.local.auto=false;
-set hive.exec.job.debug.capture.stacktraces=false;
-set hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.VerifySessionStateStackTracesHook;
-
-FROM src SELECT TRANSFORM(key, value) USING 'script_does_not_exist' AS (key, value);
-
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- Hadoop 0.23 changes the getTaskDiagnostics behavior
--- The Error Code of hive failure MapReduce job changes
--- In Hadoop 0.20
--- Hive failure MapReduce job gets 20000 as Error Code
--- In Hadoop 0.23
--- Hive failure MapReduce job gets 2 as Error Code
diff --git a/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out b/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out
deleted file mode 100644
index dda4216..0000000
--- a/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out
+++ /dev/null
@@ -1,13 +0,0 @@
-PREHOOK: query: FROM src SELECT TRANSFORM(key, value) USING 'script_does_not_exist' AS (key, value)
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-FATAL org.apache.hadoop.hive.ql.exec.mr.ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-FATAL org.apache.hadoop.hive.ql.exec.mr.ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-FATAL org.apache.hadoop.hive.ql.exec.mr.ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-FATAL org.apache.hadoop.hive.ql.exec.mr.ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask. Unable to initialize custom script.
diff --git a/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q.out b/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q.out
deleted file mode 100644
index dfc8f54..0000000
--- a/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q.out
+++ /dev/null
@@ -1,5 +0,0 @@
-PREHOOK: query: FROM src SELECT TRANSFORM(key, value) USING 'script_does_not_exist' AS (key, value)
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask. Unable to initialize custom script.
diff --git a/testutils/ptest2/src/test/resources/test-configuration2.properties b/testutils/ptest2/src/test/resources/test-configuration2.properties
index 19e1ac2..bb4e77c 100644
--- a/testutils/ptest2/src/test/resources/test-configuration2.properties
+++ b/testutils/ptest2/src/test/resources/test-configuration2.properties
@@ -107,7 +107,6 @@ qFileTest.miniMrNegative.include = normal
 qFileTest.miniMrNegative.isolate = flaky
 # normal are tests that run in minimr mode via build-common.xml
 qFileTest.miniMrNegative.groups.normal = mainProperties.${minimr.query.negative.files}
-qFileTest.miniMrNegative.groups.flaky = mapreduce_stack_trace_hadoop20.q
 
 qFileTest.clientNegative.driver = TestNegativeCliDriver
 qFileTest.clientNegative.directory = ql/src/test/queries/clientnegative
@@ -118,7 +117,7 @@ qFileTest.clientNegative.exclude = miniMrNormal failing
 # Disable for HIVE-4941 as this tests runs via ant test
 #qFileTest.clientNegative.groups.failing = stats_aggregator_error_1.q
 # normal are run via minimr
-qFileTest.clientNegative.groups.miniMrNormal = cluster_tasklog_retrieval.q minimr_broken_pipe.q mapreduce_stack_trace.q mapreduce_stack_trace_turnoff.q mapreduce_stack_trace_hadoop20.q mapreduce_stack_trace_turnoff_hadoop20.q
+qFileTest.clientNegative.groups.miniMrNormal = cluster_tasklog_retrieval.q minimr_broken_pipe.q mapreduce_stack_trace.q mapreduce_stack_trace_turnoff.q
 
 qFileTest.hbasePositive.driver = TestHBaseCliDriver
 qFileTest.hbasePositive.directory = hbase-handler/src/test/queries/positive