From 15b2fa056517ae3963fcbcf8e4144ae142843cdf Mon Sep 17 00:00:00 2001 From: Xiaobing Zhou Date: Thu, 5 Feb 2015 15:27:17 -0800 Subject: [PATCH] HIVE-9480: Build UDF TRUNC to implement FIRST_DAY as compared with LAST_DAY --- .../hadoop/hive/ql/exec/FunctionRegistry.java | 1 + .../hive/ql/udf/generic/GenericUDFTrunc.java | 197 +++++++++++++++ .../hive/ql/udf/generic/TestGenericUDFTrunc.java | 262 +++++++++++++++++++ .../test/queries/clientnegative/udf_trunc_error1.q | 1 + .../test/queries/clientnegative/udf_trunc_error2.q | 1 + .../test/queries/clientnegative/udf_trunc_error3.q | 1 + .../test/queries/clientnegative/udf_trunc_error4.q | 1 + .../test/queries/clientnegative/udf_trunc_error5.q | 1 + ql/src/test/queries/clientpositive/udf_trunc.q | 102 ++++++++ .../results/clientnegative/udf_trunc_error1.q.out | 1 + .../results/clientnegative/udf_trunc_error2.q.out | 1 + .../results/clientnegative/udf_trunc_error3.q.out | 1 + .../results/clientnegative/udf_trunc_error4.q.out | 1 + .../results/clientnegative/udf_trunc_error5.q.out | 1 + .../results/clientpositive/show_functions.q.out | 1 + ql/src/test/results/clientpositive/udf_trunc.q.out | 279 +++++++++++++++++++++ 16 files changed, 852 insertions(+) create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java create mode 100644 ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java create mode 100644 ql/src/test/queries/clientnegative/udf_trunc_error1.q create mode 100644 ql/src/test/queries/clientnegative/udf_trunc_error2.q create mode 100644 ql/src/test/queries/clientnegative/udf_trunc_error3.q create mode 100644 ql/src/test/queries/clientnegative/udf_trunc_error4.q create mode 100644 ql/src/test/queries/clientnegative/udf_trunc_error5.q create mode 100644 ql/src/test/queries/clientpositive/udf_trunc.q create mode 100644 ql/src/test/results/clientnegative/udf_trunc_error1.q.out create mode 100644 ql/src/test/results/clientnegative/udf_trunc_error2.q.out create mode 
100644 ql/src/test/results/clientnegative/udf_trunc_error3.q.out create mode 100644 ql/src/test/results/clientnegative/udf_trunc_error4.q.out create mode 100644 ql/src/test/results/clientnegative/udf_trunc_error5.q.out create mode 100644 ql/src/test/results/clientpositive/udf_trunc.q.out diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java index bfb4dc2..5117426 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java @@ -272,6 +272,7 @@ registerUDF("from_unixtime", UDFFromUnixTime.class, false); registerGenericUDF("to_date", GenericUDFDate.class); registerUDF("weekofyear", UDFWeekOfYear.class, false); + registerGenericUDF("trunc", GenericUDFTrunc.class); registerGenericUDF("last_day", GenericUDFLastDay.class); registerGenericUDF("date_add", GenericUDFDateAdd.class); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java new file mode 100644 index 0000000..aa990ff --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java @@ -0,0 +1,196 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.udf.generic; + +import java.sql.Timestamp; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Calendar; +import java.util.Date; + +import org.apache.hadoop.hive.ql.exec.Description; +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.serde2.io.DateWritable; +import org.apache.hadoop.hive.serde2.io.TimestampWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter; +import org.apache.hadoop.io.Text; + +/** + * GenericUDFTrunc. + * + * Returns the first day of the month which the date belongs to. + * The time part of the date will be ignored. 
+ * + */ +@Description(name = "trunc", +value = "_FUNC_(date, fmt) - Returns date with the day truncated to the first day of the " + + "month which the date belongs to. The fmt is 'MM'.", +extended = "date is a string in the format 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'." + + " The time part of date is ignored.\n" + + "Example:\n " + " > SELECT _FUNC_('2009-01-12', 'MM') FROM src LIMIT 1;\n" + " '2009-01-01'") +public class GenericUDFTrunc extends GenericUDF { + + private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); + private transient TimestampConverter timestampConverter; + private transient Converter textConverter1; + private transient Converter textConverter2; + private transient Converter dateWritableConverter; + private transient PrimitiveCategory inputType1; + private transient PrimitiveCategory inputType2; + private final Calendar calendar = Calendar.getInstance(); + private final Text output = new Text(); + private transient final String FORMAT_MODEL = "MM"; + + @Override + public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { + if (arguments.length != 2) { + throw new UDFArgumentLengthException("trunc() requires 2 argument, got " + arguments.length); + } + + if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) { + throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but " + + arguments[0].getTypeName() + " is passed. as first arguments"); + } + + if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) { + throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but " + + arguments[1].getTypeName() + " is passed. 
as second arguments"); + } + + ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector; + inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory(); + switch (inputType1) { + case STRING: + case VARCHAR: + case CHAR: + inputType1 = PrimitiveCategory.STRING; + textConverter1 = ObjectInspectorConverters.getConverter( + (PrimitiveObjectInspector) arguments[0], + PrimitiveObjectInspectorFactory.writableStringObjectInspector); + break; + case TIMESTAMP: + timestampConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0], + PrimitiveObjectInspectorFactory.writableTimestampObjectInspector); + break; + case DATE: + dateWritableConverter = ObjectInspectorConverters.getConverter( + (PrimitiveObjectInspector) arguments[0], + PrimitiveObjectInspectorFactory.writableDateObjectInspector); + break; + default: + throw new UDFArgumentException( + " TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got " + + inputType1); + } + + inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory(); + switch (inputType2) { + case STRING: + case VARCHAR: + case CHAR: + inputType2 = PrimitiveCategory.STRING; + textConverter2 = ObjectInspectorConverters.getConverter( + (PrimitiveObjectInspector) arguments[1], + PrimitiveObjectInspectorFactory.writableStringObjectInspector); + break; + default: + throw new UDFArgumentException( + " TRUNC() only takes STRING type as second argument, got " + + inputType2); + } + + return outputOI; + } + + @Override + public Object evaluate(DeferredObject[] arguments) throws HiveException { + if (arguments.length != 2) { + throw new UDFArgumentLengthException("trunc() requires 2 argument, got " + arguments.length); + } + + if (arguments[0].get() == null || arguments[1].get() == null) { + return null; + } + + String fmt = textConverter2.convert(arguments[1].get()).toString(); + if (!fmt.equals(FORMAT_MODEL)) + throw new UDFArgumentException(" TRUNC() only 
takes 'MM' format, got " + "'" + fmt + "'"); + + Date date; + switch (inputType1) { + case STRING: + String dateString = textConverter1.convert(arguments[0].get()).toString(); + try { + date = formatter.parse(dateString.toString()); + } catch (ParseException e) { + return null; + } + firstDay(date); + break; + case TIMESTAMP: + Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get())) + .getTimestamp(); + date = ts; + firstDay(date); + break; + case DATE: + DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get()); + date = dw.get(); + firstDay(date); + break; + default: + throw new UDFArgumentException("TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + + inputType1); + } + Date newDate = calendar.getTime(); + output.set(formatter.format(newDate)); + return output; + } + + @Override + public String getDisplayString(String[] children) { + StringBuilder sb = new StringBuilder(); + sb.append("trunc("); + if (children.length > 0) { + sb.append(children[0]); + for (int i = 1; i < children.length; i++) { + sb.append(", "); + sb.append(children[i]); + } + } + sb.append(")"); + return sb.toString(); + } + + private Calendar firstDay(Date d) { + calendar.setTime(d); + int minDd = calendar.getActualMinimum(Calendar.DAY_OF_MONTH); + calendar.set(Calendar.DAY_OF_MONTH, minDd); + return calendar; + } +} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java new file mode 100644 index 0000000..8ef368e --- /dev/null +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java @@ -0,0 +1,262 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.udf.generic; + +import java.sql.Date; +import java.sql.Timestamp; + +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; +import org.apache.hadoop.hive.serde2.io.DateWritable; +import org.apache.hadoop.hive.serde2.io.TimestampWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.io.Text; + +import junit.framework.TestCase; + +public class TestGenericUDFTrunc extends TestCase { + + DeferredObject valueObjFmt = new DeferredJavaObject(new Text("MM")); + + public void testStringToDate() throws HiveException { + GenericUDFTrunc udf = new GenericUDFTrunc(); + ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.javaStringObjectInspector; + ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector; + ObjectInspector[] initArgs = { valueOI0, valueOI1}; + + DeferredObject valueObjFmt = new DeferredJavaObject(new Text("MM")); + + DeferredObject valueObj0; + DeferredObject[] evalArgs; + + // test date string + valueObj0 = new DeferredJavaObject(new Text("2014-01-01")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt 
}; + runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new Text("2014-01-14")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new Text("2014-01-31")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new Text("2014-02-02")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new Text("2014-02-28")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new Text("2016-02-03")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new Text("2016-02-28")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new Text("2016-02-29")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + + // test timestamp string + valueObj0 = new DeferredJavaObject(new Text("2014-01-01 10:30:45")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new Text("2014-01-14 10:30:45")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new Text("2014-01-31 10:30:45")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new Text("2014-02-02 10:30:45")); + 
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new Text("2014-02-28 10:30:45")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new Text("2016-02-03 10:30:45")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new Text("2016-02-28 10:30:45")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new Text("2016-02-29 10:30:45")); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + } + + public void testTimestampToDate() throws HiveException { + GenericUDFTrunc udf = new GenericUDFTrunc(); + ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; + ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector; + ObjectInspector[] initArgs = { valueOI0, valueOI1}; + + DeferredObject valueObjFmt = new DeferredJavaObject(new Text("MM")); + + DeferredObject valueObj0; + DeferredObject[] evalArgs; + + // test date string + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2014-01-01 00:00:00"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2014-01-14 00:00:00"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2014-01-31 00:00:00"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + 
runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2014-02-02 00:00:00"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2014-02-28 00:00:00"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2016-02-03 00:00:00"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2016-02-28 00:00:00"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2016-02-29 00:00:00"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + + // test timestamp string + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2014-01-01 10:30:45"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2014-01-14 10:30:45"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2014-01-31 10:30:45"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2014-02-02 10:30:45"))); + evalArgs 
= new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2014-02-28 10:30:45"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2016-02-03 10:30:45"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2016-02-28 10:30:45"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new TimestampWritable( + Timestamp.valueOf("2016-02-29 10:30:45"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + } + + public void testDateWritableToDate() throws HiveException { + GenericUDFTrunc udf = new GenericUDFTrunc(); + ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableDateObjectInspector; + ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector; + ObjectInspector[] initArgs = { valueOI0, valueOI1}; + + DeferredObject valueObjFmt = new DeferredJavaObject(new Text("MM")); + + DeferredObject valueObj0; + DeferredObject[] evalArgs; + + // test date string + valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-01"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-14"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-31"))); 
+ evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-01-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-02-02"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-02-28"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2014-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-03"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-28"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + + valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-29"))); + evalArgs = new DeferredObject[] { valueObj0, valueObjFmt }; + runAndVerify("2016-02-01", udf, initArgs, evalArgs); + } + + private void runAndVerify(String expResult, GenericUDF udf, ObjectInspector[] initArgs, + DeferredObject[] evalArgs) throws HiveException { + udf.initialize(initArgs); + Text output = (Text) udf.evaluate(evalArgs); + assertEquals("frist_day() test ", expResult, output.toString()); + } +} diff --git a/ql/src/test/queries/clientnegative/udf_trunc_error1.q b/ql/src/test/queries/clientnegative/udf_trunc_error1.q new file mode 100644 index 0000000..048b57b --- /dev/null +++ b/ql/src/test/queries/clientnegative/udf_trunc_error1.q @@ -0,0 +1 @@ +SELECT TRUNC('2014-01-01', 'M') FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientnegative/udf_trunc_error2.q b/ql/src/test/queries/clientnegative/udf_trunc_error2.q new file mode 100644 index 0000000..ca62363 --- /dev/null +++ b/ql/src/test/queries/clientnegative/udf_trunc_error2.q @@ -0,0 +1 
@@ +SELECT TRUNC('2014-01-01', 1) FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientnegative/udf_trunc_error3.q b/ql/src/test/queries/clientnegative/udf_trunc_error3.q new file mode 100644 index 0000000..d1d5d53 --- /dev/null +++ b/ql/src/test/queries/clientnegative/udf_trunc_error3.q @@ -0,0 +1 @@ +SELECT TRUNC('2014-01-01', null) FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientnegative/udf_trunc_error4.q b/ql/src/test/queries/clientnegative/udf_trunc_error4.q new file mode 100644 index 0000000..8f592b3 --- /dev/null +++ b/ql/src/test/queries/clientnegative/udf_trunc_error4.q @@ -0,0 +1 @@ +SELECT TRUNC(1.0, 'MM') FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientnegative/udf_trunc_error5.q b/ql/src/test/queries/clientnegative/udf_trunc_error5.q new file mode 100644 index 0000000..b825943 --- /dev/null +++ b/ql/src/test/queries/clientnegative/udf_trunc_error5.q @@ -0,0 +1 @@ +SELECT TRUNC(null, 'MM') FROM src tablesample (1 rows); diff --git a/ql/src/test/queries/clientpositive/udf_trunc.q b/ql/src/test/queries/clientpositive/udf_trunc.q new file mode 100644 index 0000000..e3589b5 --- /dev/null +++ b/ql/src/test/queries/clientpositive/udf_trunc.q @@ -0,0 +1,102 @@ +DESCRIBE FUNCTION trunc; +DESCRIBE FUNCTION EXTENDED trunc; + +--test string format +EXPLAIN +SELECT TRUNC('2014-01-01', 'MM'), + TRUNC('2014-01-14', 'MM'), + TRUNC('2014-01-31', 'MM'), + TRUNC('2014-02-02', 'MM'), + TRUNC('2014-02-28', 'MM'), + TRUNC('2016-02-03', 'MM'), + TRUNC('2016-02-28', 'MM'), + TRUNC('2016-02-29', 'MM'), + TRUNC('2014-01-01 10:30:45', 'MM'), + TRUNC('2014-01-14 10:30:45', 'MM'), + TRUNC('2014-01-31 10:30:45', 'MM'), + TRUNC('2014-02-02 10:30:45', 'MM'), + TRUNC('2014-02-28 10:30:45', 'MM'), + TRUNC('2016-02-03 10:30:45', 'MM'), + TRUNC('2016-02-28 10:30:45', 'MM'), + TRUNC('2016-02-29 10:30:45', 'MM') +FROM src tablesample (1 rows); + +SELECT TRUNC('2014-01-01', 'MM'), + TRUNC('2014-01-14', 'MM'), + 
TRUNC('2014-01-31', 'MM'), + TRUNC('2014-02-02', 'MM'), + TRUNC('2014-02-28', 'MM'), + TRUNC('2016-02-03', 'MM'), + TRUNC('2016-02-28', 'MM'), + TRUNC('2016-02-29', 'MM'), + TRUNC('2014-01-01 10:30:45', 'MM'), + TRUNC('2014-01-14 10:30:45', 'MM'), + TRUNC('2014-01-31 10:30:45', 'MM'), + TRUNC('2014-02-02 10:30:45', 'MM'), + TRUNC('2014-02-28 10:30:45', 'MM'), + TRUNC('2016-02-03 10:30:45', 'MM'), + TRUNC('2016-02-28 10:30:45', 'MM'), + TRUNC('2016-02-29 10:30:45', 'MM') +FROM src tablesample (1 rows); + + +--test timestamp format +EXPLAIN +SELECT TRUNC(CAST('2014-01-01 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-14 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-31 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-02 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-28 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-03 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-28 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-29 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-01 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-14 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-31 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-02 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-28 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-03 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-28 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-29 10:30:45' AS TIMESTAMP), 'MM') +FROM src tablesample (1 rows); + +SELECT TRUNC(CAST('2014-01-01 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-14 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-31 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-02 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-28 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-03 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-28 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-29 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-01 10:30:45' AS 
TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-14 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-31 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-02 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-28 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-03 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-28 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-29 10:30:45' AS TIMESTAMP), 'MM') +FROM src tablesample (1 rows); + + +--test timestamp format +EXPLAIN +SELECT TRUNC(CAST('2014-01-01' AS DATE), 'MM'), + TRUNC(CAST('2014-01-14' AS DATE), 'MM'), + TRUNC(CAST('2014-01-31' AS DATE), 'MM'), + TRUNC(CAST('2014-02-02' AS DATE), 'MM'), + TRUNC(CAST('2014-02-28' AS DATE), 'MM'), + TRUNC(CAST('2016-02-03' AS DATE), 'MM'), + TRUNC(CAST('2016-02-28' AS DATE), 'MM'), + TRUNC(CAST('2016-02-29' AS DATE), 'MM') +FROM src tablesample (1 rows); + +SELECT TRUNC(CAST('2014-01-01' AS DATE), 'MM'), + TRUNC(CAST('2014-01-14' AS DATE), 'MM'), + TRUNC(CAST('2014-01-31' AS DATE), 'MM'), + TRUNC(CAST('2014-02-02' AS DATE), 'MM'), + TRUNC(CAST('2014-02-28' AS DATE), 'MM'), + TRUNC(CAST('2016-02-03' AS DATE), 'MM'), + TRUNC(CAST('2016-02-28' AS DATE), 'MM'), + TRUNC(CAST('2016-02-29' AS DATE), 'MM') +FROM src tablesample (1 rows); \ No newline at end of file diff --git a/ql/src/test/results/clientnegative/udf_trunc_error1.q.out b/ql/src/test/results/clientnegative/udf_trunc_error1.q.out new file mode 100644 index 0000000..fbde2d8 --- /dev/null +++ b/ql/src/test/results/clientnegative/udf_trunc_error1.q.out @@ -0,0 +1 @@ +FAILED: SemanticException [Error 10014]: Line 1:7 Wrong arguments ''M'': org.apache.hadoop.hive.ql.exec.UDFArgumentException: TRUNC() only takes 'MM' format, got 'M' diff --git a/ql/src/test/results/clientnegative/udf_trunc_error2.q.out b/ql/src/test/results/clientnegative/udf_trunc_error2.q.out new file mode 100644 index 0000000..00a5599 --- /dev/null +++ b/ql/src/test/results/clientnegative/udf_trunc_error2.q.out @@ -0,0 +1 @@ +FAILED: 
SemanticException [Error 10014]: Line 1:7 Wrong arguments '1': TRUNC() only takes STRING type as second argument, got INT diff --git a/ql/src/test/results/clientnegative/udf_trunc_error3.q.out b/ql/src/test/results/clientnegative/udf_trunc_error3.q.out new file mode 100644 index 0000000..95ce27a --- /dev/null +++ b/ql/src/test/results/clientnegative/udf_trunc_error3.q.out @@ -0,0 +1 @@ +FAILED: SemanticException [Error 10014]: Line 1:7 Wrong arguments 'TOK_NULL': TRUNC() only takes STRING type as second argument, got VOID diff --git a/ql/src/test/results/clientnegative/udf_trunc_error4.q.out b/ql/src/test/results/clientnegative/udf_trunc_error4.q.out new file mode 100644 index 0000000..8b32083 --- /dev/null +++ b/ql/src/test/results/clientnegative/udf_trunc_error4.q.out @@ -0,0 +1 @@ +FAILED: SemanticException [Error 10014]: Line 1:7 Wrong arguments ''MM'': TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got DOUBLE diff --git a/ql/src/test/results/clientnegative/udf_trunc_error5.q.out b/ql/src/test/results/clientnegative/udf_trunc_error5.q.out new file mode 100644 index 0000000..cc86e53 --- /dev/null +++ b/ql/src/test/results/clientnegative/udf_trunc_error5.q.out @@ -0,0 +1 @@ +FAILED: SemanticException [Error 10014]: Line 1:7 Wrong arguments ''MM'': TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got VOID diff --git a/ql/src/test/results/clientpositive/show_functions.q.out b/ql/src/test/results/clientpositive/show_functions.q.out index e21b54b..1811ed6 100644 --- a/ql/src/test/results/clientpositive/show_functions.q.out +++ b/ql/src/test/results/clientpositive/show_functions.q.out @@ -184,6 +184,7 @@ to_unix_timestamp to_utc_timestamp translate trim +trunc ucase unbase64 unhex diff --git a/ql/src/test/results/clientpositive/udf_trunc.q.out b/ql/src/test/results/clientpositive/udf_trunc.q.out new file mode 100644 index 0000000..0c88cea --- /dev/null +++ b/ql/src/test/results/clientpositive/udf_trunc.q.out @@ -0,0 
+1,279 @@ +PREHOOK: query: DESCRIBE FUNCTION trunc +PREHOOK: type: DESCFUNCTION +POSTHOOK: query: DESCRIBE FUNCTION trunc +POSTHOOK: type: DESCFUNCTION +trunc(date, fmt) - Returns date with the day truncated to the first day of the month which the date belongs to. The fmt is 'MM'. +PREHOOK: query: DESCRIBE FUNCTION EXTENDED trunc +PREHOOK: type: DESCFUNCTION +POSTHOOK: query: DESCRIBE FUNCTION EXTENDED trunc +POSTHOOK: type: DESCFUNCTION +trunc(date, fmt) - Returns date with the day truncated to the first day of the month which the date belongs to. The fmt is 'MM'. +date is a string in the format 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'. The time part of date is ignored. +Example: + > SELECT trunc('2009-01-12', 'MM') FROM src LIMIT 1; + '2009-01-01' +PREHOOK: query: --test string format +EXPLAIN +SELECT TRUNC('2014-01-01', 'MM'), + TRUNC('2014-01-14', 'MM'), + TRUNC('2014-01-31', 'MM'), + TRUNC('2014-02-02', 'MM'), + TRUNC('2014-02-28', 'MM'), + TRUNC('2016-02-03', 'MM'), + TRUNC('2016-02-28', 'MM'), + TRUNC('2016-02-29', 'MM'), + TRUNC('2014-01-01 10:30:45', 'MM'), + TRUNC('2014-01-14 10:30:45', 'MM'), + TRUNC('2014-01-31 10:30:45', 'MM'), + TRUNC('2014-02-02 10:30:45', 'MM'), + TRUNC('2014-02-28 10:30:45', 'MM'), + TRUNC('2016-02-03 10:30:45', 'MM'), + TRUNC('2016-02-28 10:30:45', 'MM'), + TRUNC('2016-02-29 10:30:45', 'MM') +FROM src tablesample (1 rows) +PREHOOK: type: QUERY +POSTHOOK: query: --test string format +EXPLAIN +SELECT TRUNC('2014-01-01', 'MM'), + TRUNC('2014-01-14', 'MM'), + TRUNC('2014-01-31', 'MM'), + TRUNC('2014-02-02', 'MM'), + TRUNC('2014-02-28', 'MM'), + TRUNC('2016-02-03', 'MM'), + TRUNC('2016-02-28', 'MM'), + TRUNC('2016-02-29', 'MM'), + TRUNC('2014-01-01 10:30:45', 'MM'), + TRUNC('2014-01-14 10:30:45', 'MM'), + TRUNC('2014-01-31 10:30:45', 'MM'), + TRUNC('2014-02-02 10:30:45', 'MM'), + TRUNC('2014-02-28 10:30:45', 'MM'), + TRUNC('2016-02-03 10:30:45', 'MM'), + TRUNC('2016-02-28 10:30:45', 'MM'), + TRUNC('2016-02-29 10:30:45', 'MM') +FROM src 
tablesample (1 rows) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: '2014-01-01' (type: string), '2014-01-01' (type: string), '2014-01-01' (type: string), '2014-02-01' (type: string), '2014-02-01' (type: string), '2016-02-01' (type: string), '2016-02-01' (type: string), '2016-02-01' (type: string), '2014-01-01' (type: string), '2014-01-01' (type: string), '2014-01-01' (type: string), '2014-02-01' (type: string), '2014-02-01' (type: string), '2016-02-01' (type: string), '2016-02-01' (type: string), '2016-02-01' (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15 + Statistics: Num rows: 500 Data size: 752000 Basic stats: COMPLETE Column stats: COMPLETE + ListSink + +PREHOOK: query: SELECT TRUNC('2014-01-01', 'MM'), + TRUNC('2014-01-14', 'MM'), + TRUNC('2014-01-31', 'MM'), + TRUNC('2014-02-02', 'MM'), + TRUNC('2014-02-28', 'MM'), + TRUNC('2016-02-03', 'MM'), + TRUNC('2016-02-28', 'MM'), + TRUNC('2016-02-29', 'MM'), + TRUNC('2014-01-01 10:30:45', 'MM'), + TRUNC('2014-01-14 10:30:45', 'MM'), + TRUNC('2014-01-31 10:30:45', 'MM'), + TRUNC('2014-02-02 10:30:45', 'MM'), + TRUNC('2014-02-28 10:30:45', 'MM'), + TRUNC('2016-02-03 10:30:45', 'MM'), + TRUNC('2016-02-28 10:30:45', 'MM'), + TRUNC('2016-02-29 10:30:45', 'MM') +FROM src tablesample (1 rows) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: SELECT TRUNC('2014-01-01', 'MM'), + TRUNC('2014-01-14', 'MM'), + TRUNC('2014-01-31', 'MM'), + TRUNC('2014-02-02', 'MM'), + TRUNC('2014-02-28', 'MM'), + TRUNC('2016-02-03', 'MM'), + TRUNC('2016-02-28', 'MM'), + TRUNC('2016-02-29', 'MM'), + TRUNC('2014-01-01 
10:30:45', 'MM'), + TRUNC('2014-01-14 10:30:45', 'MM'), + TRUNC('2014-01-31 10:30:45', 'MM'), + TRUNC('2014-02-02 10:30:45', 'MM'), + TRUNC('2014-02-28 10:30:45', 'MM'), + TRUNC('2016-02-03 10:30:45', 'MM'), + TRUNC('2016-02-28 10:30:45', 'MM'), + TRUNC('2016-02-29 10:30:45', 'MM') +FROM src tablesample (1 rows) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +2014-01-01 2014-01-01 2014-01-01 2014-02-01 2014-02-01 2016-02-01 2016-02-01 2016-02-01 2014-01-01 2014-01-01 2014-01-01 2014-02-01 2014-02-01 2016-02-01 2016-02-01 2016-02-01 +PREHOOK: query: --test timestamp format +EXPLAIN +SELECT TRUNC(CAST('2014-01-01 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-14 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-31 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-02 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-28 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-03 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-28 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-29 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-01 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-14 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-31 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-02 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-28 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-03 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-28 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-29 10:30:45' AS TIMESTAMP), 'MM') +FROM src tablesample (1 rows) +PREHOOK: type: QUERY +POSTHOOK: query: --test timestamp format +EXPLAIN +SELECT TRUNC(CAST('2014-01-01 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-14 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-31 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-02 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-28 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-03 00:00:00' AS TIMESTAMP), 'MM'), 
+ TRUNC(CAST('2016-02-28 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-29 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-01 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-14 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-31 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-02 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-28 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-03 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-28 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-29 10:30:45' AS TIMESTAMP), 'MM') +FROM src tablesample (1 rows) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: '2014-01-01' (type: string), '2014-01-01' (type: string), '2014-01-01' (type: string), '2014-02-01' (type: string), '2014-02-01' (type: string), '2016-02-01' (type: string), '2016-02-01' (type: string), '2016-02-01' (type: string), '2014-01-01' (type: string), '2014-01-01' (type: string), '2014-01-01' (type: string), '2014-02-01' (type: string), '2014-02-01' (type: string), '2016-02-01' (type: string), '2016-02-01' (type: string), '2016-02-01' (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15 + Statistics: Num rows: 500 Data size: 752000 Basic stats: COMPLETE Column stats: COMPLETE + ListSink + +PREHOOK: query: SELECT TRUNC(CAST('2014-01-01 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-14 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-31 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-02 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-28 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-03 00:00:00' AS TIMESTAMP), 'MM'), + 
TRUNC(CAST('2016-02-28 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-29 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-01 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-14 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-31 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-02 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-28 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-03 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-28 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-29 10:30:45' AS TIMESTAMP), 'MM') +FROM src tablesample (1 rows) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: SELECT TRUNC(CAST('2014-01-01 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-14 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-31 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-02 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-28 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-03 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-28 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-29 00:00:00' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-01 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-14 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-01-31 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-02 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2014-02-28 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-03 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-28 10:30:45' AS TIMESTAMP), 'MM'), + TRUNC(CAST('2016-02-29 10:30:45' AS TIMESTAMP), 'MM') +FROM src tablesample (1 rows) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +2014-01-01 2014-01-01 2014-01-01 2014-02-01 2014-02-01 2016-02-01 2016-02-01 2016-02-01 2014-01-01 2014-01-01 2014-01-01 2014-02-01 2014-02-01 2016-02-01 2016-02-01 2016-02-01 +PREHOOK: query: --test timestamp format +EXPLAIN +SELECT TRUNC(CAST('2014-01-01' 
AS DATE), 'MM'), + TRUNC(CAST('2014-01-14' AS DATE), 'MM'), + TRUNC(CAST('2014-01-31' AS DATE), 'MM'), + TRUNC(CAST('2014-02-02' AS DATE), 'MM'), + TRUNC(CAST('2014-02-28' AS DATE), 'MM'), + TRUNC(CAST('2016-02-03' AS DATE), 'MM'), + TRUNC(CAST('2016-02-28' AS DATE), 'MM'), + TRUNC(CAST('2016-02-29' AS DATE), 'MM') +FROM src tablesample (1 rows) +PREHOOK: type: QUERY +POSTHOOK: query: --test timestamp format +EXPLAIN +SELECT TRUNC(CAST('2014-01-01' AS DATE), 'MM'), + TRUNC(CAST('2014-01-14' AS DATE), 'MM'), + TRUNC(CAST('2014-01-31' AS DATE), 'MM'), + TRUNC(CAST('2014-02-02' AS DATE), 'MM'), + TRUNC(CAST('2014-02-28' AS DATE), 'MM'), + TRUNC(CAST('2016-02-03' AS DATE), 'MM'), + TRUNC(CAST('2016-02-28' AS DATE), 'MM'), + TRUNC(CAST('2016-02-29' AS DATE), 'MM') +FROM src tablesample (1 rows) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: src + Row Limit Per Split: 1 + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: '2014-01-01' (type: string), '2014-01-01' (type: string), '2014-01-01' (type: string), '2014-02-01' (type: string), '2014-02-01' (type: string), '2016-02-01' (type: string), '2016-02-01' (type: string), '2016-02-01' (type: string) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + Statistics: Num rows: 500 Data size: 376000 Basic stats: COMPLETE Column stats: COMPLETE + ListSink + +PREHOOK: query: SELECT TRUNC(CAST('2014-01-01' AS DATE), 'MM'), + TRUNC(CAST('2014-01-14' AS DATE), 'MM'), + TRUNC(CAST('2014-01-31' AS DATE), 'MM'), + TRUNC(CAST('2014-02-02' AS DATE), 'MM'), + TRUNC(CAST('2014-02-28' AS DATE), 'MM'), + TRUNC(CAST('2016-02-03' AS DATE), 'MM'), + TRUNC(CAST('2016-02-28' AS DATE), 'MM'), + TRUNC(CAST('2016-02-29' AS DATE), 'MM') +FROM src tablesample (1 rows) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked 
pattern was here #### +POSTHOOK: query: SELECT TRUNC(CAST('2014-01-01' AS DATE), 'MM'), + TRUNC(CAST('2014-01-14' AS DATE), 'MM'), + TRUNC(CAST('2014-01-31' AS DATE), 'MM'), + TRUNC(CAST('2014-02-02' AS DATE), 'MM'), + TRUNC(CAST('2014-02-28' AS DATE), 'MM'), + TRUNC(CAST('2016-02-03' AS DATE), 'MM'), + TRUNC(CAST('2016-02-28' AS DATE), 'MM'), + TRUNC(CAST('2016-02-29' AS DATE), 'MM') +FROM src tablesample (1 rows) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +2014-01-01 2014-01-01 2014-01-01 2014-02-01 2014-02-01 2016-02-01 2016-02-01 2016-02-01 -- 1.9.3 (Apple Git-50)