Index: build-common.xml
===================================================================
--- build-common.xml
+++ build-common.xml
@@ -59,7 +59,7 @@
-
+
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -191,6 +191,7 @@
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFNPE;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFReflect;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSentences;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSize;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSplit;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFStringToMap;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFStruct;
@@ -251,6 +252,7 @@
registerUDF("rpad", UDFRpad.class, false);
registerGenericUDF("size", GenericUDFSize.class);
+ registerGenericUDF("npe", GenericUDFNPE.class);
registerUDF("round", UDFRound.class, false);
registerUDF("floor", UDFFloor.class, false);
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
@@ -106,10 +106,6 @@
return "Ended Job = " + jobId;
}
- private String getTaskAttemptLogUrl(String taskTrackerHttpAddress, String taskAttemptId) {
- return taskTrackerHttpAddress + "/tasklog?taskid=" + taskAttemptId + "&all=true";
- }
-
public boolean mapStarted() {
return mapProgress > 0;
}
@@ -541,7 +537,8 @@
}
// These tasks should have come from the same job.
assert (ti.getJobId() != null && ti.getJobId().equals(jobId));
- ti.getLogUrls().add(getTaskAttemptLogUrl(t.getTaskTrackerHttp(), t.getTaskId()));
+ ti.getLogUrls().add(ShimLoader.getHadoopShims().getTaskAttemptLogUrl(
+ t.getTaskTrackerHttp(), t.getTaskId()));
// If a task failed, then keep track of the total number of failures
// for that task (typically, a task gets re-run up to 4 times if it
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java
@@ -94,9 +94,6 @@
console.printError(e.getMessage());
}
}
- private String getTaskAttemptLogUrl(String taskTrackerHttpAddress, String taskAttemptId) {
- return taskTrackerHttpAddress + "/tasklog?taskid=" + taskAttemptId + "&start=-8193";
- }
class TaskLogGrabber implements Runnable {
@@ -146,7 +143,8 @@
}
// These tasks should have come from the same job.
assert (ti.getJobId() != null && ti.getJobId().equals(jobId));
- ti.getLogUrls().add(getTaskAttemptLogUrl(t.getTaskTrackerHttp(), t.getTaskId()));
+ ti.getLogUrls().add(ShimLoader.getHadoopShims().getTaskAttemptLogUrl(
+ t.getTaskTrackerHttp(), t.getTaskId()));
// If a task failed, then keep track of the total number of failures
// for that task (typically, a task gets re-run up to 4 times if it
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNPE.java
===================================================================
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNPE.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+ * GenericUDFNPE.
+ * This UDF always throws a NullPointerException when evaluated.
+ * It is used to test Hive's failure handling, in particular task log
+ * retrieval for failed jobs.
+ */
+@Description(name = "npe", value = "_FUNC_(a) - Throws a NullPointerException")
+public class GenericUDFNPE extends GenericUDF {
+
+ @Override
+ public ObjectInspector initialize(ObjectInspector[] arguments)
+ throws UDFArgumentException {
+ return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+ }
+
+ @Override
+ public Object evaluate(DeferredObject[] arguments) throws HiveException {
+ // Fail unconditionally so the resulting task logs can be verified.
+ throw new NullPointerException("evaluate null pointer exception");
+ }
+
+ @Override
+ public String getDisplayString(String[] children) {
+ assert (children.length == 1);
+ return "npe(" + children[0] + ")";
+ }
+}
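Note (editorial, not part of the patch): a minimal sketch of exercising the
UDF directly, assuming GenericUDF.DeferredJavaObject is available as a test
helper; in practice the clientnegative test below drives it through a full job:

    GenericUDFNPE udf = new GenericUDFNPE();
    udf.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaIntObjectInspector });
    // evaluate() fails unconditionally, regardless of its argument.
    udf.evaluate(new GenericUDF.DeferredObject[] {
        new GenericUDF.DeferredJavaObject(1) }); // throws NullPointerException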
Index: ql/src/test/queries/clientnegative/cluster_npe_tasklog.q
===================================================================
--- /dev/null
+++ ql/src/test/queries/clientnegative/cluster_npe_tasklog.q
@@ -0,0 +1,4 @@
+-- Task log retrieval upon a NullPointerException in the cluster
+FROM src_thrift
+SELECT npe(src_thrift.lint)
+WHERE src_thrift.lint IS NOT NULL LIMIT 1;
Index: ql/src/test/results/clientnegative/cluster_npe_tasklog.q.out
===================================================================
--- /dev/null
+++ ql/src/test/results/clientnegative/cluster_npe_tasklog.q.out
@@ -0,0 +1,8 @@
+PREHOOK: query: -- Task log retrieval upon a NullPointerException in the cluster
+FROM src_thrift
+SELECT npe(src_thrift.lint)
+WHERE src_thrift.lint IS NOT NULL LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+#### A masked pattern was here ####
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
Index: shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
===================================================================
--- shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
+++ shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
@@ -526,6 +526,11 @@
}
@Override
+ public String getTaskAttemptLogUrl(String taskTrackerHttpAddress, String taskAttemptId) {
+ return taskTrackerHttpAddress + "/tasklog?taskid=" + taskAttemptId + "&all=true";
+ }
+
+ @Override
public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
JobTrackerState state;
switch (clusterStatus.getJobTrackerState()) {
Index: shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
===================================================================
--- shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
+++ shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
@@ -30,6 +30,11 @@
public class Hadoop20SShims extends HadoopShimsSecure {
@Override
+ public String getTaskAttemptLogUrl(String taskTrackerHttpAddress, String taskAttemptId) {
+ return taskTrackerHttpAddress + "/tasklog?taskid=" + taskAttemptId + "&all=true";
+ }
+
+ @Override
public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
JobTrackerState state;
switch (clusterStatus.getJobTrackerState()) {
Index: shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
===================================================================
--- shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
+++ shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
@@ -25,14 +25,27 @@
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.apache.hadoop.mapreduce.util.HostUtil;
import org.apache.hadoop.util.Progressable;
/**
* Implemention of shims against Hadoop 0.23.0.
*/
public class Hadoop23Shims extends HadoopShimsSecure {
@Override
+ public String getTaskAttemptLogUrl(String taskTrackerHttpAddress, String taskAttemptId) {
+ // The tracker address has the form "http://<host>:<port>"; split it into
+ // the host and port that HostUtil needs to build the log URL.
+ int hostIndex = taskTrackerHttpAddress.indexOf("http://");
+ int colonIndex = taskTrackerHttpAddress.lastIndexOf(":");
+ return HostUtil.getTaskLogUrl(
+ taskTrackerHttpAddress.substring(hostIndex + 7, colonIndex),
+ taskTrackerHttpAddress.substring(colonIndex + 1),
+ taskAttemptId);
+ }
+
+ @Override
public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
JobTrackerState state;
switch (clusterStatus.getJobTrackerStatus()) {
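Note (editorial, not part of the patch): the parsing above splits the tracker
HTTP address into the host and port that HostUtil.getTaskLogUrl expects. A
short illustration with a hypothetical address:

    String addr = "http://tracker-42.example.com:50060"; // hypothetical
    int hostIndex = addr.indexOf("http://");  // 0 when the scheme is present
    int colonIndex = addr.lastIndexOf(":");   // separates host from port
    String host = addr.substring(hostIndex + 7, colonIndex); // "tracker-42.example.com"
    String port = addr.substring(colonIndex + 1);            // "50060"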
Index: shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
===================================================================
--- shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
+++ shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
@@ -66,6 +66,15 @@
boolean usesJobShell();
/**
+ * Constructs and returns the URL of the log for a given task attempt.
+ *
+ * @param taskTrackerHttpAddress HTTP address of the task tracker that ran the attempt
+ * @param taskAttemptId ID of the task attempt
+ * @return the task attempt log URL
+ */
+ String getTaskAttemptLogUrl(String taskTrackerHttpAddress, String taskAttemptId);
+
+ /**
* Return true if the job has not switched to RUNNING state yet
* and is still in PREP state
*/
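Note (editorial, not part of the patch): with this interface method in place,
callers such as HadoopJobExecHelper and JobDebugger pick up the URL format of
whichever Hadoop version is on the classpath. A minimal sketch, with
hypothetical address and attempt-id values:

    // On the 0.20/0.20S shims this returns
    // "http://tracker-42.example.com:50060/tasklog?taskid=<attemptId>&all=true";
    // on 0.23 it delegates to HostUtil.getTaskLogUrl.
    String url = ShimLoader.getHadoopShims().getTaskAttemptLogUrl(
        "http://tracker-42.example.com:50060",   // hypothetical tracker address
        "attempt_201201011200_0001_m_000000_0"); // hypothetical attempt id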