diff --git build-common.xml build-common.xml
index a6f6547..12c9284 100644
--- build-common.xml
+++ build-common.xml
@@ -59,7 +59,7 @@
-
+
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 4ce3dbf..d62a6b6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -191,7 +191,6 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFReflect;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSentences;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSize;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFNPE;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSplit;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFStringToMap;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFStruct;
@@ -252,7 +251,6 @@ public final class FunctionRegistry {
registerUDF("rpad", UDFRpad.class, false);
registerGenericUDF("size", GenericUDFSize.class);
- registerGenericUDF("npe", GenericUDFNPE.class);
registerUDF("round", UDFRound.class, false);
registerUDF("floor", UDFFloor.class, false);
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNPE.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNPE.java
deleted file mode 100644
index b3232db..0000000
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNPE.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.udf.generic;
-
-import java.lang.NullPointerException;
-
-import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.io.IntWritable;
-
-/**
- * GenericUDFNPE
- * This UDF is to throw an Null Pointer Exception
- * It is used to test hive failure handling
- *
- */
-@Description(name = "npe", value = "_FUNC_(a) - Throws an Null Pointer Exception")
-public class GenericUDFNPE extends GenericUDF {
- private final IntWritable result = new IntWritable(-1);
-
- @Override
- public ObjectInspector initialize(ObjectInspector[] arguments)
- throws UDFArgumentException {
- return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
- }
-
- @Override
- public Object evaluate(DeferredObject[] arguments) throws HiveException {
- if (true) {
- throw new NullPointerException("evaluate null pointer exception");
- }
- return result;
- }
-
- @Override
- public String getDisplayString(String[] children) {
- assert (children.length == 1);
- return "npe(" + children[0] + ")";
- }
-}
diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java
new file mode 100644
index 0000000..9b3ee33
--- /dev/null
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.Text;
+
+/**
+ * GenericUDFEvaluateNPE
+ * This UDF throws a NullPointerException when evaluated.
+ * It is used to test Hive's task failure handling.
+ *
+ */
+@Description(name = "evaluate_npe", value = "_FUNC_(string) - Throws an Null Pointer Exception")
+public class GenericUDFEvaluateNPE extends GenericUDF {
+ private ObjectInspector[] argumentOIs;
+  private final Text result = new Text();
+
+ @Override
+ public ObjectInspector initialize(ObjectInspector[] arguments)
+ throws UDFArgumentException {
+ if (arguments.length != 1) {
+ throw new UDFArgumentLengthException(
+          "The function evaluate_npe(string) "
+          + "needs only one argument.");
+ }
+
+ if (!arguments[0].getTypeName().equals(Constants.STRING_TYPE_NAME)) {
+ throw new UDFArgumentTypeException(0,
+          "Argument 1 of function evaluate_npe must be \""
+          + Constants.STRING_TYPE_NAME + "\" but \""
+          + arguments[0].getTypeName() + "\" was found.");
+ }
+
+ argumentOIs = arguments;
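+    // evaluate() always throws, but initialize() must still declare a concrete return ObjectInspector.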
+ return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+ }
+
+ @Override
+ public Object evaluate(DeferredObject[] arguments) throws HiveException {
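+    // The if (true) guard keeps javac from rejecting the return below as unreachable code.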
+ if (true) {
+ throw new NullPointerException("evaluate null pointer exception");
+ }
+ return result;
+ }
+
+ @Override
+ public String getDisplayString(String[] children) {
+ assert (children.length == 1);
+ return "evaluate_npe(" + children[0] + ")";
+ }
+}
diff --git ql/src/test/queries/clientnegative/cluster_npe_tasklog.q ql/src/test/queries/clientnegative/cluster_npe_tasklog.q
deleted file mode 100644
index 42ba56b..0000000
--- ql/src/test/queries/clientnegative/cluster_npe_tasklog.q
+++ /dev/null
@@ -1,4 +0,0 @@
--- TaskLog retrieval upon Null Pointer Exception in Cluster
-FROM src_thrift
-SELECT npe(src_thrift.lint)
-WHERE src_thrift.lint IS NOT NULL LIMIT 1;
diff --git ql/src/test/queries/clientnegative/cluster_tasklog_retrieval.q ql/src/test/queries/clientnegative/cluster_tasklog_retrieval.q
new file mode 100644
index 0000000..bc98044
--- /dev/null
+++ ql/src/test/queries/clientnegative/cluster_tasklog_retrieval.q
@@ -0,0 +1,6 @@
+-- TaskLog retrieval upon Null Pointer Exception in Cluster
+
+CREATE TEMPORARY FUNCTION evaluate_npe AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFEvaluateNPE';
+
+FROM src
+SELECT evaluate_npe(src.key) LIMIT 1;
diff --git ql/src/test/results/clientnegative/cluster_npe_tasklog.q.out ql/src/test/results/clientnegative/cluster_npe_tasklog.q.out
deleted file mode 100644
index 8ec1e46..0000000
--- ql/src/test/results/clientnegative/cluster_npe_tasklog.q.out
+++ /dev/null
@@ -1,8 +0,0 @@
-PREHOOK: query: -- TaskLog retrieval upon Null Pointer Exception in Cluster
-FROM src_thrift
-SELECT npe(src_thrift.lint)
-WHERE src_thrift.lint IS NOT NULL LIMIT 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_thrift
-#### A masked pattern was here ####
-FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
diff --git ql/src/test/results/clientnegative/cluster_tasklog_retrieval.q.out ql/src/test/results/clientnegative/cluster_tasklog_retrieval.q.out
new file mode 100644
index 0000000..457980a
--- /dev/null
+++ ql/src/test/results/clientnegative/cluster_tasklog_retrieval.q.out
@@ -0,0 +1,14 @@
+PREHOOK: query: -- TaskLog retrieval upon Null Pointer Exception in Cluster
+
+CREATE TEMPORARY FUNCTION evaluate_npe AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFEvaluateNPE'
+PREHOOK: type: CREATEFUNCTION
+POSTHOOK: query: -- TaskLog retrieval upon Null Pointer Exception in Cluster
+
+CREATE TEMPORARY FUNCTION evaluate_npe AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFEvaluateNPE'
+POSTHOOK: type: CREATEFUNCTION
+PREHOOK: query: FROM src
+SELECT evaluate_npe(src.key) LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
diff --git shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
index 6834272..9ddd682 100644
--- shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
+++ shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
@@ -527,6 +527,8 @@ public class Hadoop20Shims implements HadoopShims {
@Override
public String getTaskAttemptLogUrl(String taskTrackerHttpAddress, String taskAttemptId) {
+    // Hadoop 0.20 provides no utility for building a task log URL,
+    // so the URL is constructed manually here.
return taskTrackerHttpAddress + "/tasklog?taskid=" + taskAttemptId + "&all=true";
}
diff --git shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
index 865024f..2b643c2 100644
--- shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
+++ shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
@@ -31,6 +31,8 @@ public class Hadoop20SShims extends HadoopShimsSecure {
@Override
public String getTaskAttemptLogUrl(String taskTrackerHttpAddress, String taskAttemptId) {
+    // Hadoop 0.20S provides no utility for building a task log URL,
+    // so the URL is constructed manually here.
return taskTrackerHttpAddress + "/tasklog?taskid=" + taskAttemptId + "&all=true";
}
diff --git shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
index 9e23c73..aeb57e4 100644
--- shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
+++ shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
@@ -28,6 +28,9 @@ import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.mapreduce.util.HostUtil;
import org.apache.hadoop.util.Progressable;
+import java.net.MalformedURLException;
+import java.net.URL;
+
/**
* Implemention of shims against Hadoop 0.23.0.
*/
@@ -35,12 +38,17 @@ public class Hadoop23Shims extends HadoopShimsSecure {
@Override
public String getTaskAttemptLogUrl(String taskTrackerHttpAddress, String taskAttemptId) {
- int hostIndex = taskTrackerHttpAddress.indexOf("http://");
- int colonIndex = taskTrackerHttpAddress.lastIndexOf(":");
- return HostUtil.getTaskLogUrl(
- taskTrackerHttpAddress.substring(hostIndex + 7, colonIndex),
- taskTrackerHttpAddress.substring(colonIndex + 1, taskTrackerHttpAddress.length()),
- taskAttemptId);
+    URL taskTrackerHttpURL = null;
+    try {
+      taskTrackerHttpURL = new URL(taskTrackerHttpAddress);
+    } catch (MalformedURLException e) {
+      // Task log retrieval is best effort: with an unparseable tracker
+      // address there is no URL to build, so report no log location
+      // instead of dereferencing a null URL below.
+      return null;
+    }
+    return HostUtil.getTaskLogUrl(taskTrackerHttpURL.getHost(),
+        Integer.toString(taskTrackerHttpURL.getPort()),
+        taskAttemptId);
}
@Override