diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java index e777a8f..583f479 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java @@ -21,8 +21,8 @@ import java.text.ParseException; import java.text.SimpleDateFormat; import java.sql.Date; -import java.util.TimeZone; +import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -33,16 +33,20 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateDiffScalarCol; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.io.DateWritable; +import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter; import org.apache.hadoop.io.IntWritable; +import javax.annotation.Nullable; + /** * UDFDateDiff. 
* @@ -98,6 +102,7 @@ public String getDisplayString(String[] children) { return getStandardDisplayString("datediff", children); } + @Nullable private Date convertToDate(PrimitiveCategory inputType, Converter converter, DeferredObject argument) throws HiveException { assert(converter != null); @@ -105,32 +110,31 @@ private Date convertToDate(PrimitiveCategory inputType, Converter converter, Def if (argument.get() == null) { return null; } - Date date = new Date(0); switch (inputType) { case STRING: case VARCHAR: case CHAR: String dateString = converter.convert(argument.get()).toString(); try { - date.setTime(formatter.parse(dateString).getTime()); + return new Date(formatter.parse(dateString).getTime()); } catch (ParseException e) { return null; } - break; case TIMESTAMP: Timestamp ts = ((TimestampWritable) converter.convert(argument.get())) .getTimestamp(); - date.setTime(ts.getTime()); - break; + return new Date(ts.getTime()); case DATE: DateWritable dw = (DateWritable) converter.convert(argument.get()); - date = dw.get(); - break; + return dw.get(); + case TIMESTAMPLOCALTZ: + TimestampTZ tsz = ((TimestampLocalTZWritable) converter.convert(argument.get())) + .getTimestampTZ(); + return new Date(tsz.getEpochSecond() * 1000L); default: throw new UDFArgumentException( - "TO_DATE() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType); + "DATEDIFF() only takes STRING/TIMESTAMP/DATE/TIMESTAMPLOCALTZ types, got " + inputType); } - return date; } private Converter checkArguments(ObjectInspector[] arguments, int i) throws UDFArgumentException { @@ -139,30 +143,30 @@ private Converter checkArguments(ObjectInspector[] arguments, int i) throws UDFA "Only primitive type arguments are accepted but " + arguments[i].getTypeName() + " is passed. 
as first arguments"); } - PrimitiveCategory inputType = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory(); - Converter converter; + final PrimitiveCategory inputType = + ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory(); switch (inputType) { case STRING: case VARCHAR: case CHAR: - converter = ObjectInspectorConverters.getConverter( - (PrimitiveObjectInspector) arguments[i], + return ObjectInspectorConverters.getConverter(arguments[i], PrimitiveObjectInspectorFactory.writableStringObjectInspector); - break; case TIMESTAMP: - converter = new TimestampConverter((PrimitiveObjectInspector) arguments[i], + return new TimestampConverter((PrimitiveObjectInspector) arguments[i], PrimitiveObjectInspectorFactory.writableTimestampObjectInspector); - break; + case TIMESTAMPLOCALTZ: + return new PrimitiveObjectInspectorConverter.TimestampLocalTZConverter( + (PrimitiveObjectInspector) arguments[i], + PrimitiveObjectInspectorFactory.writableTimestampTZObjectInspector + ); case DATE: - converter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector)arguments[i], + return ObjectInspectorConverters.getConverter(arguments[i], PrimitiveObjectInspectorFactory.writableDateObjectInspector); - break; default: throw new UDFArgumentException( - " DATEDIFF() only takes STRING/TIMESTAMP/DATEWRITABLE types as " + (i + 1) + " DATEDIFF() only takes STRING/TIMESTAMP/DATEWRITABLE/TIMESTAMPLOCALTZ types as " + (i + 1) + "-th argument, got " + inputType); } - return converter; } private IntWritable evaluate(Date date, Date date2) { diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java index 772967f..53dfae2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java @@ -22,7 +22,9 @@ import java.text.ParseException; import 
java.text.SimpleDateFormat; +import org.apache.calcite.util.TimestampWithTimeZoneString; import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -40,6 +42,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampLocalTZObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping; import org.apache.hadoop.io.LongWritable; @@ -56,6 +59,7 @@ private transient DateObjectInspector inputDateOI; private transient TimestampObjectInspector inputTimestampOI; + private transient TimestampLocalTZObjectInspector inputTimestampLocalTzOI; private transient Converter inputTextConverter; private transient Converter patternConverter; @@ -105,9 +109,13 @@ protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentEx case TIMESTAMP: inputTimestampOI = (TimestampObjectInspector) arguments[0]; break; + case TIMESTAMPLOCALTZ: + inputTimestampLocalTzOI = (TimestampLocalTZObjectInspector) arguments[0]; + break; default: - throw new UDFArgumentException( - "The function " + getName().toUpperCase() + " takes only string/date/timestamp types"); + throw new UDFArgumentException("The function " + getName().toUpperCase() + + " takes only string/date/timestamp/timestampwltz types. 
Got Type:" + arg1OI + .getPrimitiveCategory().name()); } } @@ -151,6 +159,11 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { retValue.set(inputDateOI.getPrimitiveWritableObject(arguments[0].get()) .getTimeInSeconds()); return retValue; + } else if (inputTimestampLocalTzOI != null) { + TimestampTZ timestampTZ = + inputTimestampLocalTzOI.getPrimitiveJavaObject(arguments[0].get()); + retValue.set(timestampTZ.getEpochSecond()); + return retValue; } Timestamp timestamp = inputTimestampOI.getPrimitiveJavaObject(arguments[0].get()); setValueFromTs(retValue, timestamp);