Index: ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFNvl2.java IDEA additional info: Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP <+>UTF-8 =================================================================== --- ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFNvl2.java (revision ) +++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFNvl2.java (revision ) @@ -0,0 +1,103 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.udf; + +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; +import org.apache.hadoop.hive.serde2.io.DoubleWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import org.apache.hadoop.io.IntWritable; +import org.apache.hadoop.io.Text; +import org.junit.Assert; +import org.junit.Test; + +public class TestUDFNvl2 { + private static final String AVAILABLE = "Available"; + private static final String NA = "N/A"; + + @Test + public void testNotNull() throws HiveException { + UDFNvl2 udf = new UDFNvl2(); + + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableStringObjectInspector, + PrimitiveObjectInspectorFactory.writableStringObjectInspector, + PrimitiveObjectInspectorFactory.writableStringObjectInspector, + }; + + GenericUDF.DeferredObject[] args = { + new GenericUDF.DeferredJavaObject( new Text("not null text") ), + new GenericUDF.DeferredJavaObject( new Text(AVAILABLE) ), + new GenericUDF.DeferredJavaObject( new Text(NA) ), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.stringTypeInfo, oi.getTypeInfo()); + Text res = (Text) udf.evaluate(args); + Assert.assertEquals(AVAILABLE, res.toString()); + } + + @Test + public void testNull() throws HiveException { + UDFNvl2 udf = new UDFNvl2(); + + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableStringObjectInspector, + PrimitiveObjectInspectorFactory.writableStringObjectInspector, + PrimitiveObjectInspectorFactory.writableStringObjectInspector, + }; + + 
GenericUDF.DeferredObject[] args = { + new GenericUDF.DeferredJavaObject( null ), + new GenericUDF.DeferredJavaObject( new Text(AVAILABLE) ), + new GenericUDF.DeferredJavaObject( new Text(NA) ), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.stringTypeInfo, oi.getTypeInfo()); + Text res = (Text) udf.evaluate(args); + Assert.assertEquals(NA, res.toString()); + } + + @Test + public void testNotNullInt() throws HiveException { + UDFNvl2 udf = new UDFNvl2(); + + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableIntObjectInspector, + PrimitiveObjectInspectorFactory.writableIntObjectInspector, + PrimitiveObjectInspectorFactory.writableIntObjectInspector, + }; + + GenericUDF.DeferredObject[] args = { + new GenericUDF.DeferredJavaObject( new IntWritable(12)), + new GenericUDF.DeferredJavaObject( new IntWritable(1) ), + new GenericUDF.DeferredJavaObject( new IntWritable(0) ), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.intTypeInfo, oi.getTypeInfo()); + IntWritable res = (IntWritable) udf.evaluate(args); + Assert.assertEquals(1, res.get()); + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java IDEA additional info: Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP <+>UTF-8 =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (date 1387352662000) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (revision ) @@ -44,76 +44,7 @@ import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; -import org.apache.hadoop.hive.ql.udf.SettableUDF; -import org.apache.hadoop.hive.ql.udf.UDAFPercentile; -import org.apache.hadoop.hive.ql.udf.UDFAcos; -import 
org.apache.hadoop.hive.ql.udf.UDFAscii; -import org.apache.hadoop.hive.ql.udf.UDFAsin; -import org.apache.hadoop.hive.ql.udf.UDFAtan; -import org.apache.hadoop.hive.ql.udf.UDFBase64; -import org.apache.hadoop.hive.ql.udf.UDFBin; -import org.apache.hadoop.hive.ql.udf.UDFConv; -import org.apache.hadoop.hive.ql.udf.UDFCos; -import org.apache.hadoop.hive.ql.udf.UDFDate; -import org.apache.hadoop.hive.ql.udf.UDFDateAdd; -import org.apache.hadoop.hive.ql.udf.UDFDateDiff; -import org.apache.hadoop.hive.ql.udf.UDFDateSub; -import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth; -import org.apache.hadoop.hive.ql.udf.UDFDegrees; -import org.apache.hadoop.hive.ql.udf.UDFE; -import org.apache.hadoop.hive.ql.udf.UDFExp; -import org.apache.hadoop.hive.ql.udf.UDFFindInSet; -import org.apache.hadoop.hive.ql.udf.UDFFromUnixTime; -import org.apache.hadoop.hive.ql.udf.UDFHex; -import org.apache.hadoop.hive.ql.udf.UDFHour; -import org.apache.hadoop.hive.ql.udf.UDFJson; -import org.apache.hadoop.hive.ql.udf.UDFLTrim; -import org.apache.hadoop.hive.ql.udf.UDFLength; -import org.apache.hadoop.hive.ql.udf.UDFLike; -import org.apache.hadoop.hive.ql.udf.UDFLn; -import org.apache.hadoop.hive.ql.udf.UDFLog; -import org.apache.hadoop.hive.ql.udf.UDFLog10; -import org.apache.hadoop.hive.ql.udf.UDFLog2; -import org.apache.hadoop.hive.ql.udf.UDFLpad; -import org.apache.hadoop.hive.ql.udf.UDFMinute; -import org.apache.hadoop.hive.ql.udf.UDFMonth; -import org.apache.hadoop.hive.ql.udf.UDFOPBitAnd; -import org.apache.hadoop.hive.ql.udf.UDFOPBitNot; -import org.apache.hadoop.hive.ql.udf.UDFOPBitOr; -import org.apache.hadoop.hive.ql.udf.UDFOPBitXor; -import org.apache.hadoop.hive.ql.udf.UDFOPLongDivide; -import org.apache.hadoop.hive.ql.udf.UDFPI; -import org.apache.hadoop.hive.ql.udf.UDFParseUrl; -import org.apache.hadoop.hive.ql.udf.UDFRTrim; -import org.apache.hadoop.hive.ql.udf.UDFRadians; -import org.apache.hadoop.hive.ql.udf.UDFRand; -import org.apache.hadoop.hive.ql.udf.UDFRegExp; -import 
org.apache.hadoop.hive.ql.udf.UDFRegExpExtract; -import org.apache.hadoop.hive.ql.udf.UDFRegExpReplace; -import org.apache.hadoop.hive.ql.udf.UDFRepeat; -import org.apache.hadoop.hive.ql.udf.UDFReverse; -import org.apache.hadoop.hive.ql.udf.UDFRpad; -import org.apache.hadoop.hive.ql.udf.UDFSecond; -import org.apache.hadoop.hive.ql.udf.UDFSign; -import org.apache.hadoop.hive.ql.udf.UDFSin; -import org.apache.hadoop.hive.ql.udf.UDFSpace; -import org.apache.hadoop.hive.ql.udf.UDFSqrt; -import org.apache.hadoop.hive.ql.udf.UDFSubstr; -import org.apache.hadoop.hive.ql.udf.UDFTan; -import org.apache.hadoop.hive.ql.udf.UDFToBoolean; -import org.apache.hadoop.hive.ql.udf.UDFToByte; -import org.apache.hadoop.hive.ql.udf.UDFToDouble; -import org.apache.hadoop.hive.ql.udf.UDFToFloat; -import org.apache.hadoop.hive.ql.udf.UDFToInteger; -import org.apache.hadoop.hive.ql.udf.UDFToLong; -import org.apache.hadoop.hive.ql.udf.UDFToShort; -import org.apache.hadoop.hive.ql.udf.UDFToString; -import org.apache.hadoop.hive.ql.udf.UDFTrim; -import org.apache.hadoop.hive.ql.udf.UDFType; -import org.apache.hadoop.hive.ql.udf.UDFUnbase64; -import org.apache.hadoop.hive.ql.udf.UDFUnhex; -import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear; -import org.apache.hadoop.hive.ql.udf.UDFYear; +import org.apache.hadoop.hive.ql.udf.*; import org.apache.hadoop.hive.ql.udf.generic.*; import org.apache.hadoop.hive.ql.udf.ptf.MatchPath.MatchPathResolver; import org.apache.hadoop.hive.ql.udf.ptf.Noop.NoopResolver; @@ -246,6 +177,7 @@ registerUDF("regexp_extract", UDFRegExpExtract.class, false); registerUDF("parse_url", UDFParseUrl.class, false); registerGenericUDF("nvl", GenericUDFNvl.class); + registerGenericUDF("nvl2", UDFNvl2.class); registerGenericUDF("split", GenericUDFSplit.class); registerGenericUDF("str_to_map", GenericUDFStringToMap.class); registerGenericUDF("translate", GenericUDFTranslate.class); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFNvl2.java IDEA additional info: Subsystem: 
com.intellij.openapi.diff.impl.patch.CharsetEP <+>UTF-8 =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFNvl2.java (revision ) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFNvl2.java (revision ) @@ -0,0 +1,75 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.ql.udf; + +import org.apache.hadoop.hive.ql.exec.Description; +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDFNvl; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; + +@Description(name = "nvl2", + value = "_FUNC_(x) - substitutes a value when a null value is encountered as well as when a non-null value is encountered.", + extended = "Example:\n" + + " > SELECT _FUNC_(null, 'Available', 'n/a') FROM src LIMIT 1;\n" + " 'n/a'") +public class UDFNvl2 extends GenericUDFNvl { + + @Override + public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { + argumentOIs = arguments; + if (arguments.length != 3) { + throw new UDFArgumentLengthException( + "The function 'NVL2' accepts 3 arguments."); + } + returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver(true); + if (!(returnOIResolver.update(arguments[0]) && + returnOIResolver.update(arguments[1]) && + returnOIResolver.update(arguments[2]))) { + throw new UDFArgumentTypeException(2, + "All arguments of the function NVL2 should have the same type, " + + "but they are different: \"" + arguments[0].getTypeName() + + "\" and \"" + arguments[1].getTypeName() + + "\" and \"" + arguments[2].getTypeName() + "\""); + } + return returnOIResolver.get(); + } + + @Override + public Object evaluate(DeferredObject[] arguments) throws HiveException { + Object retVal = returnOIResolver.convertIfNecessary(arguments[0].get(), + argumentOIs[0]); + + if (retVal != null) { + retVal = returnOIResolver.convertIfNecessary(arguments[1].get(), + argumentOIs[1]); + } else { + retVal = 
returnOIResolver.convertIfNecessary(arguments[2].get(), + argumentOIs[2]); + } + return retVal; + } + + @Override + public String getDisplayString(String[] children) { + return String.format("If %s is null returns %s, otherwise returns %s", children[0], children[1], children[2]); + } + +} \ No newline at end of file Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNvl.java IDEA additional info: Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP <+>UTF-8 =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNvl.java (date 1387352662000) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNvl.java (revision ) @@ -29,8 +29,8 @@ extended = "Example:\n" + " > SELECT _FUNC_(null,'bla') FROM src LIMIT 1;\n" + " bla") public class GenericUDFNvl extends GenericUDF{ - private transient GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver; - private transient ObjectInspector[] argumentOIs; + protected transient GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver; + protected transient ObjectInspector[] argumentOIs; @Override public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { Index: ql/src/test/results/clientpositive/show_functions.q.out IDEA additional info: Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP <+>UTF-8 =================================================================== --- ql/src/test/results/clientpositive/show_functions.q.out (date 1387352662000) +++ ql/src/test/results/clientpositive/show_functions.q.out (revision ) @@ -120,6 +120,7 @@ not ntile nvl +nvl2 or parse_url parse_url_tuple Index: ql/src/test/queries/clientpositive/udf_nvl2.q IDEA additional info: Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP <+>UTF-8 =================================================================== --- ql/src/test/queries/clientpositive/udf_nvl2.q (revision ) +++ 
ql/src/test/queries/clientpositive/udf_nvl2.q (revision ) @@ -0,0 +1,13 @@ +set hive.fetch.task.conversion=more; + +DESCRIBE FUNCTION nvl2; +DESCRIBE FUNCTION EXTENDED nvl2; + +EXPLAIN +SELECT NVL2( 1 , 1, 0 ) AS COL1, + NVL2( NULL, 1, 0 ) AS COL2 +FROM src tablesample (1 rows); + +SELECT NVL2( 1 , 1, 0 ) AS COL1, + NVL2( NULL, 1, 0 ) AS COL2 +FROM src tablesample (1 rows); \ No newline at end of file Index: ql/src/test/results/clientpositive/udf_nvl2.q.out IDEA additional info: Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP <+>UTF-8 =================================================================== --- ql/src/test/results/clientpositive/udf_nvl2.q.out (revision ) +++ ql/src/test/results/clientpositive/udf_nvl2.q.out (revision ) @@ -0,0 +1,59 @@ +PREHOOK: query: DESCRIBE FUNCTION nvl2 +PREHOOK: type: DESCFUNCTION +POSTHOOK: query: DESCRIBE FUNCTION nvl2 +POSTHOOK: type: DESCFUNCTION +nvl2(x) - substitutes a value when a null value is encountered as well as when a non-null value is encountered. +PREHOOK: query: DESCRIBE FUNCTION EXTENDED nvl2 +PREHOOK: type: DESCFUNCTION +POSTHOOK: query: DESCRIBE FUNCTION EXTENDED nvl2 +POSTHOOK: type: DESCFUNCTION +nvl2(x) - substitutes a value when a null value is encountered as well as when a non-null value is encountered. 
+Example: + > SELECT nvl2(null, 'Available', 'n/a') FROM src LIMIT 1; + 'n/a' +PREHOOK: query: EXPLAIN +SELECT NVL2( 1 , 1, 0 ) AS COL1, + NVL2( NULL, 1, 0 ) AS COL2 +FROM src tablesample (1 rows) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT NVL2( 1 , 1, 0 ) AS COL1, + NVL2( NULL, 1, 0 ) AS COL2 +FROM src tablesample (1 rows) +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION NVL2 1 1 0) COL1) (TOK_SELEXPR (TOK_FUNCTION NVL2 TOK_NULL 1 0) COL2)))) + +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Select Operator + expressions: + expr: If 1 is null returns 1, otherwise returns 0 + type: int + expr: If null is null returns 1, otherwise returns 0 + type: int + outputColumnNames: _col0, _col1 + ListSink + +PREHOOK: query: SELECT NVL2( 1 , 1, 0 ) AS COL1, + NVL2( NULL, 1, 0 ) AS COL2 +FROM src tablesample (1 rows) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: SELECT NVL2( 1 , 1, 0 ) AS COL1, + NVL2( NULL, 1, 0 ) AS COL2 +FROM src tablesample (1 rows) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +1 0