Index: conf/hive-default.xml
===================================================================
--- conf/hive-default.xml (revision 1151571)
+++ conf/hive-default.xml (working copy)
@@ -1138,4 +1138,10 @@
by record readers
+
+<property>
+  <name>hive.time.default.timezone</name>
+  <value>America/Los_Angeles</value>
+  <description>Default timezone used to localize times after deserialization</description>
+</property>
Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
===================================================================
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1151571)
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy)
@@ -457,6 +457,8 @@
HIVE_MAPPER_CANNOT_SPAN_MULTIPLE_PARTITIONS("hive.mapper.cannot.span.multiple.partitions", false),
HIVE_REWORK_MAPREDWORK("hive.rework.mapredwork", false),
HIVE_CONCATENATE_CHECK_INDEX ("hive.exec.concatenate.check.index", true),
+
+ HIVE_TIME_DEFAULT_TIMEZONE("hive.time.default.timezone", "America/Los_Angeles"),
;
public final String varname;
Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (working copy)
@@ -37,6 +37,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.Text;
@@ -48,7 +49,7 @@
/**
* Returns the digit represented by character b.
- *
+ *
* @param b
* The ascii code of the character
* @param radix
@@ -102,7 +103,7 @@
/**
* Convert a UTF-8 byte array to String.
- *
+ *
* @param bytes
* The byte[] containing the UTF-8 String.
* @param start
@@ -124,7 +125,7 @@
/**
* Write the bytes with special characters escaped.
- *
+ *
* @param escaped
* Whether the data should be written out in an escaped way.
* @param escapeChar
@@ -158,7 +159,7 @@
/**
* Write out the text representation of a Primitive Object to a UTF8 byte
* stream.
- *
+ *
* @param out
* The UTF8 byte OutputStream
* @param o
@@ -215,6 +216,11 @@
needsEscape);
break;
}
+ case TIMESTAMP: {
+ LazyTimestamp.writeUTF8(out,
+ ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o));
+ break;
+ }
default: {
throw new RuntimeException("Hive internal error.");
}
Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java (revision 0)
+++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java (revision 0)
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazy;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.UnsupportedEncodingException;
+import java.sql.Timestamp;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector;
+
+/**
+ *
+ * LazyTimestamp.
+ * Serializes and deserializes a Timestamp in the JDBC timestamp format
+ *
+ * YYYY-MM-DD HH:MM:SS.[fff...]
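+ * e.g. 2011-01-01 13:30:00.123456789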
+ *
+ */
+public class LazyTimestamp extends
+ LazyPrimitive<LazyTimestampObjectInspector, TimestampWritable> {
+ static final private Log LOG = LogFactory.getLog(LazyTimestamp.class);
+
+ public LazyTimestamp(LazyTimestampObjectInspector oi) {
+ super(oi);
+ data = new TimestampWritable();
+ }
+
+ public LazyTimestamp(LazyTimestamp copy) {
+ super(copy);
+ data = new TimestampWritable(copy.data);
+ }
+
+ /**
+ * Initializes the LazyTimestamp object by interpreting the input bytes
+ * as a JDBC timestamp string
+ *
+ * @param bytes
+ * @param start
+ * @param length
+ */
+ @Override
+ public void init(ByteArrayRef bytes, int start, int length) {
+ String s = null;
+ try {
+ s = new String(bytes.getData(), start, length, "US-ASCII");
+ } catch (UnsupportedEncodingException e) {
+ LOG.error(e);
+ s = "";
+ }
+
+ Timestamp t;
+ if (s.equals(nullTimestamp)) {
+ t = null;
+ } else {
+ t = Timestamp.valueOf(s);
+ }
+ data.set(t);
+ }
+
+ private static final String nullTimestamp = "NULL";
+
+ /**
+ * Writes a Timestamp in JDBC timestamp format to the output stream
+ * @param out
+ * The output stream
+ * @param i
+ * The Timestamp to write
+ * @throws IOException
+ */
+ public static void writeUTF8(OutputStream out, TimestampWritable i)
+ throws IOException {
+ if (i == null) {
+ // Serialize as time 0
+ out.write(TimestampWritable.nullBytes);
+ } else {
+ out.write(i.toString().getBytes("US-ASCII"));
+ }
+ }
+
+ @Override
+ public TimestampWritable getWritableObject() {
+ return data;
+ }
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java (working copy)
@@ -34,6 +34,7 @@
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyShortObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -75,6 +76,8 @@
return new LazyDouble((LazyDoubleObjectInspector) oi);
case STRING:
return new LazyString((LazyStringObjectInspector) oi);
+ case TIMESTAMP:
+ return new LazyTimestamp((LazyTimestampObjectInspector) oi);
default:
throw new RuntimeException("Internal error: no LazyObject for " + p);
}
Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java (revision 0)
+++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java (revision 0)
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.lazy.LazyTimestamp;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+
+public class LazyTimestampObjectInspector
+ extends AbstractPrimitiveLazyObjectInspector<TimestampWritable>
+ implements TimestampObjectInspector {
+
+ protected LazyTimestampObjectInspector() {
+ super(PrimitiveObjectInspectorUtils.timestampTypeEntry);
+ }
+
+ public Object copyObject(Object o) {
+ return o == null ? null : new LazyTimestamp((LazyTimestamp) o);
+ }
+
+ public Timestamp getPrimitiveJavaObject(Object o) {
+ return o == null ? null : ((LazyTimestamp) o).getWritableObject().getTimestamp();
+ }
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java (working copy)
@@ -53,6 +53,8 @@
new LazyDoubleObjectInspector();
public static final LazyVoidObjectInspector LAZY_VOID_OBJECT_INSPECTOR =
new LazyVoidObjectInspector();
+ public static final LazyTimestampObjectInspector LAZY_TIMESTAMP_OBJECT_INSPECTOR =
+ new LazyTimestampObjectInspector();
static HashMap<ArrayList<Object>, LazyStringObjectInspector> cachedLazyStringObjectInspector =
new HashMap<ArrayList<Object>, LazyStringObjectInspector>();
@@ -93,6 +95,8 @@
return getLazyStringObjectInspector(escaped, escapeChar);
case VOID:
return LAZY_VOID_OBJECT_INSPECTOR;
+ case TIMESTAMP:
+ return LAZY_TIMESTAMP_OBJECT_INSPECTOR;
default:
throw new RuntimeException("Internal error: Cannot find ObjectInspector "
+ " for " + primitiveCategory);
Index: serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (working copy)
@@ -36,6 +36,7 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -52,6 +53,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -310,6 +312,17 @@
}
return r;
}
+ case TIMESTAMP:
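+ // The binary-sortable encoding is fixed-width: 4 seconds bytes followed
+ // by 4 nanosecond bytes (see TimestampWritable.getBinarySortable)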
+ TimestampWritable t = (reuse == null ? new TimestampWritable() :
+ (TimestampWritable) reuse);
+ byte[] bytes = new byte[8];
+
+ for (int i = 0; i < bytes.length; i++) {
+ bytes[i] = buffer.read(invert);
+ }
+ t.setBinarySortable(bytes, 0);
+ return t;
+
default: {
throw new RuntimeException("Unrecognized type: "
+ ptype.getPrimitiveCategory());
@@ -539,6 +552,15 @@
buffer.write((byte) 0, invert);
return;
}
+ case TIMESTAMP: {
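+ // Always writes the fixed 8-byte sortable form, so values compare
+ // consistently whether or not a fractional part is present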
+ TimestampObjectInspector toi = (TimestampObjectInspector) poi;
+ TimestampWritable t = toi.getPrimitiveWritableObject(o);
+ byte[] data = t.getBinarySortable();
+ for (int i = 0; i < data.length; i++) {
+ buffer.write(data[i], invert);
+ }
+ return;
+ }
default: {
throw new RuntimeException("Unrecognized type: "
+ poi.getPrimitiveCategory());
Index: serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java (working copy)
@@ -24,6 +24,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.serde2.ByteStream.Output;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -133,6 +134,8 @@
* bytes are used to store the size. So the offset is 4 and the size is
* computed by concating the first four bytes together. The first four bytes
* are defined with respect to the offset in the bytes arrays.
+ * For timestamp, if the first bit is 0, the record length is 4, otherwise
+ * a VInt begins at the 5th byte and its length is added to 4.
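+ (e.g. a timestamp with no fractional part occupies exactly 4 bytes).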
*
* @param objectInspector
* object inspector of the field
@@ -186,6 +189,13 @@
recordInfo.elementOffset = vInt.length;
recordInfo.elementSize = vInt.value;
break;
+ case TIMESTAMP:
+ recordInfo.elementOffset = 0;
+ recordInfo.elementSize = 4;
+ if (TimestampWritable.hasDecimal(bytes[offset])) {
+ recordInfo.elementSize += (byte) WritableUtils.decodeVIntSize(bytes[offset+4]);
+ }
+ break;
default: {
throw new RuntimeException("Unrecognized primitive type: "
+ primitiveCategory);
@@ -302,10 +312,14 @@
* @param l
* the long
*/
- public static void writeVLong(Output byteStream, long l) {
+ public static int writeVLongToByteArray(byte[] bytes, long l) {
+ return LazyBinaryUtils.writeVLongToByteArray(bytes, 0, l);
+ }
+
+ public static int writeVLongToByteArray(byte[] bytes, int offset, long l) {
if (l >= -112 && l <= 127) {
- byteStream.write((byte) l);
- return;
+ bytes[offset] = (byte) l;
+ return 1;
}
int len = -112;
@@ -320,17 +334,25 @@
len--;
}
- byteStream.write((byte) len);
+ bytes[offset] = (byte) len;
len = (len < -120) ? -(len + 120) : -(len + 112);
for (int idx = len; idx != 0; idx--) {
int shiftbits = (idx - 1) * 8;
long mask = 0xFFL << shiftbits;
- byteStream.write((byte) ((l & mask) >> shiftbits));
+ bytes[offset+1-(idx - len)] = (byte) ((l & mask) >> shiftbits);
}
+ return 1 + len;
}
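+
+ // Scratch buffer reused across writeVLong calls; like the surrounding
+ // SerDe code, this assumes serialization is single-threaded.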
+ private static byte[] vLongBytes = new byte[9];
+
+ public static void writeVLong(Output byteStream, long l) {
+ int len = LazyBinaryUtils.writeVLongToByteArray(vLongBytes, l);
+ byteStream.write(vLongBytes, 0, len);
+ }
+
static HashMap<TypeInfo, ObjectInspector> cachedLazyBinaryObjectInspector = new HashMap<TypeInfo, ObjectInspector>();
/**
Index: serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java (working copy)
@@ -33,6 +33,7 @@
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
@@ -49,6 +50,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
@@ -237,6 +239,7 @@
int size = fields.size();
int lasti = 0;
byte nullByte = 0;
+
for (int i = 0; i < size; i++) {
// set bit to 1 if a field is not null
if (null != soi.getStructFieldData(obj, fields.get(i))) {
@@ -355,6 +358,12 @@
byteStream.write(data, 0, length);
return warnedOnceNullMapKey;
}
+ case TIMESTAMP: {
+ TimestampObjectInspector toi = (TimestampObjectInspector) poi;
+ TimestampWritable t = toi.getPrimitiveWritableObject(obj);
+ t.writeToByteStream(byteStream);
+ return warnedOnceNullMapKey;
+ }
default: {
throw new RuntimeException("Unrecognized type: "
+ poi.getPrimitiveCategory());
Index: serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java (revision 0)
+++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java (revision 0)
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazybinary;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector;
+
+/**
+ * LazyBinaryTimestamp
+ * A LazyBinaryObject that encodes a java.sql.Timestamp in 4 to 9 bytes.
+ *
+ */
+public class LazyBinaryTimestamp extends
+ LazyBinaryPrimitive<WritableTimestampObjectInspector, TimestampWritable> {
+ static final Log LOG = LogFactory.getLog(LazyBinaryTimestamp.class);
+
+ LazyBinaryTimestamp(WritableTimestampObjectInspector oi) {
+ super(oi);
+ data = new TimestampWritable();
+ }
+
+ LazyBinaryTimestamp(LazyBinaryTimestamp copy) {
+ super(copy);
+ data = new TimestampWritable(copy.data);
+ }
+
+ /**
+ * Initializes LazyBinaryTimestamp object
+ * @param bytes
+ * @param start
+ * @param length
+ * 4 if no decimal bytes follow; otherwise the trailing bytes hold the
+ * fractional part, digit-reversed and encoded as a VInt
+ */
+ @Override
+ public void init(ByteArrayRef bytes, int start, int length) {
+ data.set(bytes.getData(), start);
+ }
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java (working copy)
@@ -35,6 +35,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableLongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableVoidObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -69,6 +70,8 @@
return new LazyBinaryString((WritableStringObjectInspector) oi);
case VOID: // for NULL
return new LazyBinaryVoid((WritableVoidObjectInspector) oi);
+ case TIMESTAMP:
+ return new LazyBinaryTimestamp((WritableTimestampObjectInspector) oi);
default:
throw new RuntimeException("Internal error: no LazyBinaryObject for " + p);
}
Index: serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java (working copy)
@@ -27,7 +27,7 @@
/**
* TypeInfoFactory can be used to create the TypeInfo object for any types.
- *
+ *
* TypeInfo objects are all read-only so we can reuse them easily.
* TypeInfoFactory has internal cache to make sure we don't create 2 TypeInfo
* objects that represents the same type.
@@ -62,6 +62,7 @@
public static final TypeInfo doubleTypeInfo = getPrimitiveTypeInfo(Constants.DOUBLE_TYPE_NAME);
public static final TypeInfo byteTypeInfo = getPrimitiveTypeInfo(Constants.TINYINT_TYPE_NAME);
public static final TypeInfo shortTypeInfo = getPrimitiveTypeInfo(Constants.SMALLINT_TYPE_NAME);
+ public static final TypeInfo timestampTypeInfo = getPrimitiveTypeInfo(Constants.TIMESTAMP_TYPE_NAME);
public static final TypeInfo unknownTypeInfo = getPrimitiveTypeInfo("unknown");
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java (working copy)
@@ -31,6 +31,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableLongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableShortObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
/**
@@ -107,6 +108,10 @@
return new PrimitiveObjectInspectorConverter.StringConverter(
(PrimitiveObjectInspector) inputOI);
}
+ case TIMESTAMP:
+ return new PrimitiveObjectInspectorConverter.TimestampConverter(
+ (PrimitiveObjectInspector) inputOI,
+ (SettableTimestampObjectInspector) outputOI);
default:
throw new RuntimeException("Hive internal error: conversion of "
+ inputOI.getTypeName() + " to " + outputOI.getTypeName()
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java (working copy)
@@ -27,7 +27,7 @@
* The primitive types supported by Hive.
*/
public static enum PrimitiveCategory {
- VOID, BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING, UNKNOWN
+ VOID, BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING, TIMESTAMP, UNKNOWN
};
/**
@@ -61,7 +61,7 @@
/**
* Get a copy of the Object in the same class, so the return value can be
* stored independently of the parameter.
- *
+ *
* If the Object is a Primitive Java Object, we just return the parameter
* since Primitive Java Object is immutable.
*/
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (working copy)
@@ -30,6 +30,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
@@ -41,6 +42,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.io.Text;
@@ -440,6 +442,10 @@
}
return r;
}
+ case TIMESTAMP:
+ TimestampWritable t = ((TimestampObjectInspector) poi)
+ .getPrimitiveWritableObject(o);
+ return t.hashCode();
default: {
throw new RuntimeException("Unknown type: "
+ poi.getPrimitiveCategory());
@@ -592,6 +598,13 @@
.compareTo(s2));
}
}
+ case TIMESTAMP: {
+ TimestampWritable t1 = ((TimestampObjectInspector) poi1)
+ .getPrimitiveWritableObject(o1);
+ TimestampWritable t2 = ((TimestampObjectInspector) poi2)
+ .getPrimitiveWritableObject(o2);
+ return t1.compareTo(t2);
+ }
default: {
throw new RuntimeException("Unknown type: "
+ poi1.getPrimitiveCategory());
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java (revision 0)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java (revision 0)
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+
+
+public interface SettableTimestampObjectInspector extends TimestampObjectInspector {
+
+ Object set(Object o, byte[] bytes, int offset);
+
+ Object set(Object o, Timestamp t);
+
+ Object set(Object o, TimestampWritable t);
+
+ Object create(byte[] bytes, int offset);
+
+ Object create (Timestamp t);
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java (revision 0)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java (revision 0)
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+
+public interface TimestampObjectInspector extends PrimitiveObjectInspector {
+
+ TimestampWritable getPrimitiveWritableObject(Object o);
+
+ Timestamp getPrimitiveJavaObject(Object o);
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java (revision 0)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java (revision 0)
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+
+public class JavaTimestampObjectInspector
+ extends AbstractPrimitiveJavaObjectInspector
+ implements SettableTimestampObjectInspector {
+
+ protected JavaTimestampObjectInspector() {
+ super(PrimitiveObjectInspectorUtils.timestampTypeEntry);
+ }
+
+ public TimestampWritable getPrimitiveWritableObject(Object o) {
+ return o == null ? null : new TimestampWritable((Timestamp) o);
+ }
+
+ @Override
+ public Timestamp getPrimitiveJavaObject(Object o) {
+ return o == null ? null : (Timestamp) o;
+ }
+
+ public Timestamp get(Object o) {
+ return (Timestamp) o;
+ }
+
+ public Object set(Object o, Timestamp value) {
+ ((Timestamp) o).setTime(value.getTime());
+ return o;
+ }
+
+ public Object set(Object o, byte[] bytes, int offset) {
+ TimestampWritable.setTimestamp((Timestamp) o, bytes, offset);
+ return o;
+ }
+
+ public Object set(Object o, TimestampWritable tw) {
+ Timestamp t = (Timestamp) o;
+ t.setTime(tw.getTimestamp().getTime());
+ t.setNanos(tw.getTimestamp().getNanos());
+ return t;
+ }
+
+ public Object create(Timestamp value) {
+ return new Timestamp(value.getTime());
+ }
+
+ public Object create(byte[] bytes, int offset) {
+ return TimestampWritable.createTimestamp(bytes, offset);
+ }
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java (working copy)
@@ -21,6 +21,7 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
+import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;
@@ -28,6 +29,7 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
import org.apache.hadoop.hive.serde2.lazy.LazyLong;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -44,7 +46,7 @@
/**
* ObjectInspectorFactory is the primary way to create new ObjectInspector
* instances.
- *
+ *
* SerDe classes should call the static functions in this library to create an
* ObjectInspector to return to the caller of SerDe2.getObjectInspector().
*/
@@ -167,6 +169,9 @@
public static final PrimitiveTypeEntry shortTypeEntry = new PrimitiveTypeEntry(
PrimitiveCategory.SHORT, Constants.SMALLINT_TYPE_NAME, Short.TYPE,
Short.class, ShortWritable.class);
+ public static final PrimitiveTypeEntry timestampTypeEntry = new PrimitiveTypeEntry(
+ PrimitiveCategory.TIMESTAMP, Constants.TIMESTAMP_TYPE_NAME, null,
+ Object.class, TimestampWritable.class);
// The following is a complex type for special handling
public static final PrimitiveTypeEntry unknownTypeEntry = new PrimitiveTypeEntry(
@@ -182,6 +187,7 @@
registerType(doubleTypeEntry);
registerType(byteTypeEntry);
registerType(shortTypeEntry);
+ registerType(timestampTypeEntry);
registerType(unknownTypeEntry);
}
@@ -341,6 +347,10 @@
.getPrimitiveWritableObject(o2);
return t1.equals(t2);
}
+ case TIMESTAMP: {
+ return ((TimestampObjectInspector) oi1).getPrimitiveWritableObject(o1)
+ .equals(((TimestampObjectInspector) oi2).getPrimitiveWritableObject(o2));
+ }
default:
return false;
}
@@ -367,6 +377,9 @@
return ((DoubleObjectInspector) oi).get(o);
case STRING:
return Double.valueOf(((StringObjectInspector) oi).getPrimitiveJavaObject(o));
+ case TIMESTAMP:
+ return ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o)
+ .getDouble();
default:
throw new NumberFormatException();
}
@@ -437,6 +450,10 @@
result = s.length() != 0;
}
break;
+ case TIMESTAMP:
+ result = (((TimestampObjectInspector) oi)
+ .getPrimitiveWritableObject(o).getSeconds() != 0);
+ break;
default:
throw new RuntimeException("Hive 2 Internal error: unknown type: "
+ oi.getTypeName());
@@ -513,6 +530,10 @@
}
break;
}
+ case TIMESTAMP:
+ result = (int) (((TimestampObjectInspector) oi)
+ .getPrimitiveWritableObject(o).getSeconds());
+ break;
default: {
throw new RuntimeException("Hive 2 Internal error: unknown type: "
+ oi.getTypeName());
@@ -563,6 +584,10 @@
result = Long.parseLong(s);
}
break;
+ case TIMESTAMP:
+ result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o)
+ .getSeconds();
+ break;
default:
throw new RuntimeException("Hive 2 Internal error: unknown type: "
+ oi.getTypeName());
@@ -607,6 +632,9 @@
String s = soi.getPrimitiveJavaObject(o);
result = Double.parseDouble(s);
break;
+ case TIMESTAMP:
+ result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getDouble();
+ break;
default:
throw new RuntimeException("Hive 2 Internal error: unknown type: "
+ oi.getTypeName());
@@ -664,6 +692,9 @@
StringObjectInspector soi = (StringObjectInspector) oi;
result = soi.getPrimitiveJavaObject(o);
break;
+ case TIMESTAMP:
+ result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).toString();
+ break;
default:
throw new RuntimeException("Hive 2 Internal error: unknown type: "
+ oi.getTypeName());
@@ -671,6 +702,56 @@
return result;
}
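+
+ /**
+ * Coerces a primitive object to a Timestamp. Note the asymmetry: integral
+ * values are passed straight to new Timestamp(long), i.e. interpreted as
+ * milliseconds, while float/double values are interpreted as seconds.
+ */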
+ public static Timestamp getTimestamp(Object o, PrimitiveObjectInspector oi) {
+ if (o == null) {
+ return null;
+ }
+
+ Timestamp result = null;
+ switch (oi.getPrimitiveCategory()) {
+ case VOID:
+ result = null;
+ break;
+ case BOOLEAN:
+ result = new Timestamp(((BooleanObjectInspector) oi).get(o) ? 1 : 0);
+ break;
+ case BYTE:
+ result = new Timestamp(((ByteObjectInspector) oi).get(o));
+ break;
+ case SHORT:
+ result = new Timestamp(((ShortObjectInspector) oi).get(o));
+ break;
+ case INT:
+ result = new Timestamp(((IntObjectInspector) oi).get(o));
+ break;
+ case LONG:
+ result = new Timestamp(((LongObjectInspector) oi).get(o));
+ break;
+ case FLOAT:
+ result = TimestampWritable.floatToTimestamp(((FloatObjectInspector) oi).get(o));
+ break;
+ case DOUBLE:
+ result = TimestampWritable.doubleToTimestamp(((DoubleObjectInspector) oi).get(o));
+ break;
+ case STRING:
+ StringObjectInspector soi = (StringObjectInspector) oi;
+ String s = soi.getPrimitiveJavaObject(o).trim();
+ try {
+ result = Timestamp.valueOf(s);
+ } catch (IllegalArgumentException e) {
+ result = null;
+ }
+ break;
+ case TIMESTAMP:
+ result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getTimestamp();
+ break;
+ default:
+ throw new RuntimeException("Hive 2 Internal error: unknown type: "
+ + oi.getTypeName());
+ }
+ return result;
+ }
+
private PrimitiveObjectInspectorUtils() {
// prevent instantiation
}
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java (revision 0)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java (revision 0)
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+
+public class WritableTimestampObjectInspector extends
+ AbstractPrimitiveWritableObjectInspector implements
+ SettableTimestampObjectInspector {
+
+ public WritableTimestampObjectInspector() {
+ super(PrimitiveObjectInspectorUtils.timestampTypeEntry);
+ }
+
+ @Override
+ public TimestampWritable getPrimitiveWritableObject(Object o) {
+ return o == null ? null : (TimestampWritable) o;
+ }
+
+ public Timestamp getPrimitiveJavaObject(Object o) {
+ return o == null ? null : ((TimestampWritable) o).getTimestamp();
+ }
+
+ public Object copyObject(Object o) {
+ return o == null ? null : new TimestampWritable((TimestampWritable) o);
+ }
+
+ public Object set(Object o, byte[] bytes, int offset) {
+ ((TimestampWritable) o).set(bytes, offset);
+ return o;
+ }
+
+ public Object set(Object o, Timestamp t) {
+ ((TimestampWritable) o).set(t);
+ return o;
+ }
+
+ public Object set(Object o, TimestampWritable t) {
+ ((TimestampWritable) o).set(t);
+ return o;
+ }
+
+ public Object create(byte[] bytes, int offset) {
+ return new TimestampWritable(bytes, offset);
+ }
+
+ public Object create(Timestamp t) {
+ return new TimestampWritable(t);
+ }
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java (working copy)
@@ -28,7 +28,7 @@
/**
* PrimitiveObjectInspectorFactory is the primary way to create new
* PrimitiveObjectInspector instances.
- *
+ *
* The reason of having caches here is that ObjectInspector is because
* ObjectInspectors do not have an internal state - so ObjectInspectors with the
* same construction parameters should result in exactly the same
@@ -54,6 +54,8 @@
new JavaStringObjectInspector();
public static final JavaVoidObjectInspector javaVoidObjectInspector =
new JavaVoidObjectInspector();
+ public static final JavaTimestampObjectInspector javaTimestampObjectInspector =
+ new JavaTimestampObjectInspector();
public static final WritableBooleanObjectInspector writableBooleanObjectInspector =
new WritableBooleanObjectInspector();
@@ -73,6 +75,8 @@
new WritableStringObjectInspector();
public static final WritableVoidObjectInspector writableVoidObjectInspector =
new WritableVoidObjectInspector();
+ public static final WritableTimestampObjectInspector writableTimestampObjectInspector =
+ new WritableTimestampObjectInspector();
private static HashMap<PrimitiveCategory, AbstractPrimitiveWritableObjectInspector> cachedPrimitiveWritableInspectorCache =
new HashMap<PrimitiveCategory, AbstractPrimitiveWritableObjectInspector>();
@@ -95,6 +99,8 @@
writableStringObjectInspector);
cachedPrimitiveWritableInspectorCache.put(PrimitiveCategory.VOID,
writableVoidObjectInspector);
+ cachedPrimitiveWritableInspectorCache.put(PrimitiveCategory.TIMESTAMP,
+ writableTimestampObjectInspector);
}
private static HashMap<PrimitiveCategory, AbstractPrimitiveJavaObjectInspector> cachedPrimitiveJavaInspectorCache =
@@ -118,11 +124,13 @@
javaStringObjectInspector);
cachedPrimitiveJavaInspectorCache.put(PrimitiveCategory.VOID,
javaVoidObjectInspector);
+ cachedPrimitiveJavaInspectorCache.put(PrimitiveCategory.TIMESTAMP,
+ javaTimestampObjectInspector);
}
/**
* Returns the PrimitiveWritableObjectInspector for the PrimitiveCategory.
- *
+ *
* @param primitiveCategory
*/
public static AbstractPrimitiveWritableObjectInspector getPrimitiveWritableObjectInspector(
@@ -138,7 +146,7 @@
/**
* Returns the PrimitiveJavaObjectInspector for the PrimitiveCategory.
- *
+ *
* @param primitiveCategory
*/
public static AbstractPrimitiveJavaObjectInspector getPrimitiveJavaObjectInspector(
@@ -155,7 +163,7 @@
/**
* Returns an ObjectInspector for a primitive Class. The Class can be a Hive
* Writable class, or a Java Primitive Class.
- *
+ *
* A runtimeException will be thrown if the class is not recognized as a
* primitive type by Hive.
*/
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java (working copy)
@@ -18,11 +18,13 @@
package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+import java.sql.Timestamp;
+
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
import org.apache.hadoop.hive.serde2.lazy.LazyLong;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.io.Text;
/**
@@ -234,13 +236,34 @@
}
}
+ public static class TimestampConverter implements Converter {
+ PrimitiveObjectInspector inputOI;
+ SettableTimestampObjectInspector outputOI;
+ Object r;
+
+ public TimestampConverter(PrimitiveObjectInspector inputOI,
+ SettableTimestampObjectInspector outputOI) {
+ this.inputOI = inputOI;
+ this.outputOI = outputOI;
+ r = outputOI.create(new Timestamp(0));
+ }
+
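+ // Note: the single output object r is mutated and returned on every call,
+ // so each result must be consumed before the next convert()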
+ public Object convert(Object input) {
+ if (input == null) {
+ return null;
+ }
+ return outputOI.set(r, PrimitiveObjectInspectorUtils.getTimestamp(input,
+ inputOI));
+ }
+ }
+
/**
* A helper class to convert any primitive to Text.
*/
public static class TextConverter implements Converter {
- private PrimitiveObjectInspector inputOI;
- private Text t = new Text();
- private ByteStream.Output out = new ByteStream.Output();
+ private final PrimitiveObjectInspector inputOI;
+ private final Text t = new Text();
+ private final ByteStream.Output out = new ByteStream.Output();
private static byte[] trueBytes = {'T', 'R', 'U', 'E'};
private static byte[] falseBytes = {'F', 'A', 'L', 'S', 'E'};
@@ -291,6 +314,10 @@
case STRING:
t.set(((StringObjectInspector) inputOI).getPrimitiveJavaObject(input));
return t;
+ case TIMESTAMP:
+ t.set(((TimestampObjectInspector) inputOI)
+ .getPrimitiveWritableObject(input).toString());
+ return t;
default:
throw new RuntimeException("Hive 2 Internal error: type = " + inputOI.getTypeName());
}
Index: serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java (revision 1151571)
+++ serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java (working copy)
@@ -39,6 +39,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
/**
* SerDeUtils.
@@ -254,6 +255,13 @@
sb.append('"');
break;
}
+ case TIMESTAMP: {
+ sb.append('"');
+ sb.append(((TimestampObjectInspector) poi)
+ .getPrimitiveWritableObject(o));
+ sb.append('"');
+ break;
+ }
default:
throw new RuntimeException("Unknown primitive type: "
+ poi.getPrimitiveCategory());
Index: serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java (revision 0)
+++ serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java (revision 0)
@@ -0,0 +1,537 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.io;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.math.BigDecimal;
+import java.sql.Timestamp;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.TimeZone;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.ByteStream.Output;
+import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
+import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableUtils;
+
+/**
+ * TimestampWritable
+ * Writable equivalent of java.sql.Timestamp
+ *
+ * Timestamps are of the format
+ * YYYY-MM-DD HH:MM:SS.[fff...]
+ *
+ * We encode Unix timestamp in seconds in 4 bytes, using the MSB to signify
+ * whether the timestamp has a fractional portion.
+ *
+ * The fractional portion is reversed, and encoded as a VInt
+ * so timestamps with less precision use fewer bytes.
+ *
+ * 0.1 -> 1
+ * 0.01 -> 10
+ * 0.001 -> 100
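+ * 0.052 -> 250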
+ *
+ */
+public class TimestampWritable implements WritableComparable {
+ static final private Log LOG = LogFactory.getLog(TimestampWritable.class);
+
+ static final public byte[] nullBytes = {0x0, 0x0, 0x0, 0x0};
+
+ private static final int NO_DECIMAL_MASK = 0x7FFFFFFF;
+ private static final int HAS_DECIMAL_MASK = 0x80000000;
+
+ private static final DateFormat dateFormat =
+ new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+
+ // Must be overridden with value from job conf before jobs begin
+ private static TimeZone defaultTimeZone = TimeZone.getDefault();
+
+ private Timestamp timestamp = new Timestamp(0);
+
+ /**
+ * true if the data is stored only in the timestamp field rather than in
+ * the byte arrays; this allows lazy conversion to bytes when necessary
+ */
+ private boolean bytesEmpty;
+ private boolean timestampEmpty;
+
+ /* Allow use of external byte[] for efficiency */
+ private byte[] currentBytes;
+ private final byte[] internalBytes = new byte[9];
+ private byte[] externalBytes;
+ private int offset;
+
+ /* Reused to read VInts */
+ static private final VInt vInt = new VInt();
+
+ /* Constructors */
+ public TimestampWritable() {
+ for (int i = 0; i < internalBytes.length; i++) {
+ internalBytes[i] = 0x0;
+ }
+ bytesEmpty = false;
+ currentBytes = internalBytes;
+ offset = 0;
+
+ clearTimestamp();
+ }
+
+ public TimestampWritable(byte[] bytes, int offset) {
+ set(bytes, offset);
+ }
+
+ public TimestampWritable(TimestampWritable t) {
+ this(t.getBytes(), 0);
+ }
+
+ public TimestampWritable(Timestamp t) {
+ set(t);
+ }
+
+ public void set(byte[] bytes, int offset) {
+ externalBytes = bytes;
+ this.offset = offset;
+ bytesEmpty = false;
+ currentBytes = externalBytes;
+
+ clearTimestamp();
+ }
+
+ public void set(Timestamp t) {
+ this.timestamp = t;
+ bytesEmpty = true;
+ timestampEmpty = false;
+ }
+
+ public void set(TimestampWritable t) {
+ if (t.bytesEmpty) {
+ set(t.getTimestamp());
+ return;
+ }
+ if (t.currentBytes == t.externalBytes) {
+ set(t.currentBytes, t.offset);
+ } else {
+ set(t.currentBytes, 0);
+ }
+ }
+
+ private void clearTimestamp() {
+ timestampEmpty = true;
+ }
+
+ public void writeToByteStream(Output byteStream) {
+ checkBytes();
+ byteStream.write(currentBytes, offset, getTotalLength());
+ }
+
+ /**
+ *
+ * @return seconds corresponding to this TimestampWritable
+ */
+ public int getSeconds() {
+ if (bytesEmpty) {
+ return (int) (timestamp.getTime() / 1000);
+ }
+ return TimestampWritable.getSeconds(currentBytes, offset);
+ }
+
+ /**
+ *
+ * @return nanoseconds in this TimestampWritable
+ */
+ public int getNanos() {
+ if (!timestampEmpty) {
+ return timestamp.getNanos();
+ }
+
+ return TimestampWritable.getNanos(currentBytes, offset+4);
+ }
+
+ /**
+ *
+ * @return length of serialized TimestampWritable data
+ */
+ private int getTotalLength() {
+ return 4 + getDecimalLength();
+ }
+
+ /**
+ *
+ * @return number of bytes the variable length decimal takes up
+ */
+ private int getDecimalLength() {
+ checkBytes();
+ return WritableUtils.decodeVIntSize(currentBytes[offset+4]);
+ }
+
+ public Timestamp getTimestamp() {
+ if (timestampEmpty) {
+ populateTimestamp();
+ }
+ return timestamp;
+ }
+
+ /**
+ * Used to create copies of objects
+ * @return a copy of the internal TimestampWritable byte[]
+ */
+ public byte[] getBytes() {
+ checkBytes();
+
+ int len = getTotalLength();
+ byte[] b = new byte[len];
+
+ copyBytes(b, 0, currentBytes, offset, len);
+
+ return b;
+ }
+
+ /**
+ * @return byte[] representation of TimestampWritable that is binary
+ * sortable (4 byte seconds, 4 bytes for nanoseconds)
+ */
+ public byte[] getBinarySortable() {
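+ // The decimal flag is set unconditionally here so every sortable encoding
+ // shares one bit layout; setBinarySortable() recomputes it from nanos.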
+ byte[] b = new byte[8];
+ int nanos = getNanos();
+ int seconds = HAS_DECIMAL_MASK | getSeconds();
+ intToBytes(seconds, b, 0);
+ intToBytes(nanos, b, 4);
+ return b;
+ }
+
+ /**
+ * Given a byte[] that has binary sortable data, initialize the internal
+ * structures to hold that data
+ * @param bytes
+ * @param offset
+ */
+ public void setBinarySortable(byte[] bytes, int offset) {
+ int seconds = bytesToInt(bytes, offset);
+ int nanos = bytesToInt(bytes, offset+4);
+ if (nanos == 0) {
+ seconds &= NO_DECIMAL_MASK;
+ } else {
+ seconds |= HAS_DECIMAL_MASK;
+ }
+ intToBytes(seconds, internalBytes, 0);
+ setNanosBytes(nanos, internalBytes, 4);
+ currentBytes = internalBytes;
+ this.offset = 0;
+ }
+
+ /**
+ * The data of TimestampWritable can be stored either in a byte[]
+ * or in a Timestamp object. Calling this method ensures that the byte[]
+ * is populated from the Timestamp object if previously empty.
+ */
+ private void checkBytes() {
+ if (bytesEmpty) {
+ // Populate byte[] from Timestamp
+ convertTimestampToBytes(timestamp, internalBytes, 0);
+ offset = 0;
+ currentBytes = internalBytes;
+ bytesEmpty = false;
+ }
+ }
+
+ /**
+ *
+ * @return double representation of the timestamp, accurate to nanoseconds
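+ * (seconds plus fraction, e.g. 5.052 for 5 seconds and 52 milliseconds)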
+ */
+ public double getDouble() {
+ double seconds, nanos;
+ if (bytesEmpty) {
+ seconds = timestamp.getTime() / 1000;
+ nanos = timestamp.getNanos();
+ } else {
+ seconds = getSeconds();
+ nanos = getNanos();
+ }
+ return seconds + ((double) nanos) / 1000000000;
+ }
+
+
+
+ public void readFields(DataInput in) throws IOException {
+ in.readFully(internalBytes, 0, 4);
+ if (TimestampWritable.hasDecimal(internalBytes[0])) {
+ in.readFully(internalBytes, 4, 1);
+ int len = (byte) WritableUtils.decodeVIntSize(internalBytes[4]);
+ in.readFully(internalBytes, 5, len-1);
+ }
+ currentBytes = internalBytes;
+ this.offset = 0;
+ }
+
+ public void write(OutputStream out) throws IOException {
+ checkBytes();
+ out.write(currentBytes, offset, getTotalLength());
+ }
+
+ public void write(DataOutput out) throws IOException {
+ write((OutputStream) out);
+ }
+
+ public int compareTo(Object o) {
+ TimestampWritable t = (TimestampWritable) o;
+ checkBytes();
+ int s1 = this.getSeconds();
+ int s2 = t.getSeconds();
+ if (s1 == s2) {
+ int n1 = this.getNanos();
+ int n2 = t.getNanos();
+ if (n1 == n2) {
+ return 0;
+ }
+ return n1 - n2;
+ } else {
+ return s1 - s2;
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ return compareTo(o) == 0;
+ }
+
+ @Override
+ public String toString() {
+ if (timestampEmpty) {
+ populateTimestamp();
+ }
+
+ String timestampString = timestamp.toString();
+ if (timestampString.length() > 19) {
+ if (timestampString.length() == 21) {
+ if (timestampString.substring(19).equals(".0")) {
+ return dateFormat.format(timestamp);
+ }
+ }
+ return dateFormat.format(timestamp) + timestampString.substring(19);
+ }
+
+ return dateFormat.format(timestamp);
+ }
+
+ @Override
+ public int hashCode() {
+ long seconds = getSeconds();
+ seconds <<= 32;
+ seconds |= getNanos();
+ return (int) ((seconds >>> 32) ^ seconds);
+ }
+
+ private void populateTimestamp() {
+ long seconds = getSeconds();
+ int nanos = getNanos();
+ timestamp.setTime(seconds * 1000);
+ timestamp.setNanos(nanos);
+ }
+
+ /** Static methods **/
+
+ public static void setDefaultTimeZone(TimeZone t) {
+ TimestampWritable.defaultTimeZone = t;
+ }
+
+ public static TimeZone getDefaultTimeZone() {
+ return TimestampWritable.defaultTimeZone;
+ }
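+
+ // Assumed wiring, not shown in this patch: job setup code reads
+ // hive.time.default.timezone from the conf and calls
+ // setDefaultTimeZone(TimeZone.getTimeZone(value)).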
+
+
+ /**
+ * Gets seconds stored as integer at bytes[offset]
+ * @param bytes
+ * @param offset
+ * @return the seconds value, with the decimal flag masked off
+ */
+ public static int getSeconds(byte[] bytes, int offset) {
+ return NO_DECIMAL_MASK & bytesToInt(bytes, offset);
+ }
+
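+ /**
+ * Decodes the digit-reversed VInt fractional part at bytes[offset];
+ * e.g. a stored value of 250 decodes to 52000000 nanoseconds (0.052s).
+ */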
+ public static int getNanos(byte[] bytes, int offset) {
+ LazyBinaryUtils.readVInt(bytes, offset, vInt);
+ int val = vInt.value;
+ int len = (int) Math.floor(Math.log10(val)) + 1;
+
+ // Reverse the value
+ int tmp = 0;
+ while (val != 0) {
+ tmp *= 10;
+ tmp += val % 10;
+ val /= 10;
+ }
+ val = tmp;
+
+ if (len < 9) {
+ val *= Math.pow(10, 9 - len);
+ }
+ return val;
+ }
+
+ /**
+ * Writes a Timestamp's serialized value to byte array b at the given offset
+ * @param t
+ * @param b
+ * @param offset
+ */
+ public static void convertTimestampToBytes(Timestamp t, byte[] b,
+ int offset) {
+ if (b.length < 9) {
+ LOG.error("byte array too short");
+ }
+ long millis = t.getTime();
+ int nanos = t.getNanos();
+
+ boolean hasDecimal = setNanosBytes(nanos, b, offset+4);
+ setSecondsBytes(millis, b, offset, hasDecimal);
+ }
+
+ /**
+ * Given milliseconds since the epoch, writes the seconds portion as a
+ * big-endian integer to the byte array b at offset.
+ * @param millis milliseconds since the epoch
+ * @param b the destination byte array
+ * @param offset index in b at which to start writing
+ * @param hasDecimal whether to set the decimal flag in the top bit
+ */
+ private static void setSecondsBytes(long millis, byte[] b, int offset, boolean hasDecimal) {
+ int seconds = (int) (millis / 1000);
+
+ if (!hasDecimal) {
+ seconds &= NO_DECIMAL_MASK;
+ } else {
+ seconds |= HAS_DECIMAL_MASK;
+ }
+
+ intToBytes(seconds, b, offset);
+ }
+
+ /**
+ * Given an integer representing nanoseconds, writes its digit-reversed
+ * serialized value as a VInt to the byte array b at offset.
+ *
+ * @param nanos nanoseconds value, 0 to 999999999
+ * @param b the destination byte array
+ * @param offset index in b at which to start writing
+ * @return true if a non-zero decimal was written, false otherwise
+ */
+ private static boolean setNanosBytes(int nanos, byte[] b, int offset) {
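+ // Reverse the nine decimal digits so that trailing zeros (common in
+ // values like .1 = 100000000 ns) become leading zeros and vanish,
+ // keeping the VInt short.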
+ int decimal = 0;
+ if (nanos != 0) {
+ int counter = 0;
+ while (counter < 9) {
+ decimal *= 10;
+ decimal += nanos % 10;
+ nanos /= 10;
+ counter++;
+ }
+ }
+
+ LazyBinaryUtils.writeVLongToByteArray(b, offset, decimal);
+ return decimal != 0;
+ }
+
+ /**
+ * Interprets a float as a unix timestamp and returns a Timestamp object.
+ * @param f seconds since the epoch, with an optional fractional part
+ * @return the corresponding Timestamp
+ */
+ public static Timestamp floatToTimestamp(float f) {
+ return doubleToTimestamp((double) f);
+ }
+
+ public static Timestamp doubleToTimestamp(double f) {
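+ // e.g. 1.3041352164485E9 round-trips to '2011-04-29 20:46:56.4485'
+ // (see timestamp_3.q.out below)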
+ long seconds = (long) f;
+
+ // Go through the decimal string form to preserve the exactness of the
+ // double's fractional portion: a fraction of 0.6 would otherwise become
+ // 0.59999... and significantly reduce the savings from binary serialization.
+ BigDecimal bd = new BigDecimal(String.valueOf(f));
+ bd = bd.subtract(new BigDecimal(seconds)).multiply(new BigDecimal(1000000000));
+ int nanos = bd.intValue();
+
+ // Convert to millis
+ long millis = seconds * 1000;
+ Timestamp t = new Timestamp(millis);
+
+ // Set remaining fractional portion to nanos
+ t.setNanos(nanos);
+ return t;
+ }
+
+ public static void setTimestamp(Timestamp t, byte[] bytes, int offset) {
+ t.setTime(((long) TimestampWritable.getSeconds(bytes, offset)) * 1000);
+ t.setNanos(TimestampWritable.getNanos(bytes, offset+4));
+ }
+
+ public static Timestamp createTimestamp(byte[] bytes, int offset) {
+ Timestamp t = new Timestamp(0);
+ TimestampWritable.setTimestamp(t, bytes, offset);
+ return t;
+ }
+
+ /**
+ * Checks the decimal flag in the first byte of an encoded TimestampWritable.
+ * @param b first byte in an encoded TimestampWritable
+ * @return true if it has a decimal portion, false otherwise
+ */
+ public static boolean hasDecimal(byte b) {
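+ // The top (sign) bit of the first seconds byte doubles as the decimal flag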
+ return (b >> 7) != 0;
+ }
+
+ private static void copyBytes(byte[] dest, int destOffset, byte[] src,
+ int srcOffset, int length) {
+ for (int i = 0; i < length; i++) {
+ dest[destOffset + i] = src[srcOffset + i];
+ }
+ }
+
+ /**
+ * Writes value as a big-endian integer into dest at offset.
+ * @param value the integer to write
+ * @param dest the destination byte array
+ * @param offset index in dest at which to start writing
+ */
+ private static void intToBytes(int value, byte[] dest, int offset) {
+ dest[offset] = (byte) ((value >> 24) & 0xFF);
+ dest[offset+1] = (byte) ((value >> 16) & 0xFF);
+ dest[offset+2] = (byte) ((value >> 8) & 0xFF);
+ dest[offset+3] = (byte) (value & 0xFF);
+ }
+
+ /**
+ * Reads a big-endian integer from bytes at offset.
+ * @param bytes the source byte array
+ * @param offset index of the first of the four bytes to read
+ * @return integer represented by the four bytes in bytes
+ * beginning at offset
+ */
+ private static int bytesToInt(byte[] bytes, int offset) {
+ return ((0xFF & bytes[offset]) << 24)
+ | ((0xFF & bytes[offset+1]) << 16)
+ | ((0xFF & bytes[offset+2]) << 8)
+ | (0xFF & bytes[offset+3]);
+ }
+}
Index: ql/src/test/results/clientnegative/invalid_t_create1.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_create1.q.out (revision 1151571)
+++ ql/src/test/results/clientnegative/invalid_t_create1.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead
+FAILED: Error in semantic analysis: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead
Index: ql/src/test/results/clientnegative/invalid_create_tbl1.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (revision 1151571)
+++ ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead
+FAILED: Error in semantic analysis: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead
Index: ql/src/test/results/clientnegative/invalid_t_create2.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_create2.q.out (revision 1151571)
+++ ql/src/test/results/clientnegative/invalid_t_create2.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead
+FAILED: Error in semantic analysis: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead
Index: ql/src/test/results/clientnegative/wrong_column_type.q.out
===================================================================
--- ql/src/test/results/clientnegative/wrong_column_type.q.out (revision 1151571)
+++ ql/src/test/results/clientnegative/wrong_column_type.q.out (working copy)
@@ -3,4 +3,4 @@
POSTHOOK: query: CREATE TABLE dest1(a float)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@dest1
-FAILED: Error in semantic analysis: No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (array). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(double) _FUNC_(string)
+FAILED: Error in semantic analysis: No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (array). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp)
Index: ql/src/test/results/clientnegative/invalid_t_alter1.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_alter1.q.out (revision 1151571)
+++ ql/src/test/results/clientnegative/invalid_t_alter1.q.out (working copy)
@@ -3,4 +3,4 @@
POSTHOOK: query: CREATE TABLE alter_test (d STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@alter_test
-FAILED: Error in semantic analysis: DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead
+FAILED: Error in semantic analysis: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead
Index: ql/src/test/results/clientnegative/invalid_t_transform.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_transform.q.out (revision 1151571)
+++ ql/src/test/results/clientnegative/invalid_t_transform.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead
+FAILED: Error in semantic analysis: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead
Index: ql/src/test/results/clientnegative/invalid_t_alter2.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_alter2.q.out (revision 1151571)
+++ ql/src/test/results/clientnegative/invalid_t_alter2.q.out (working copy)
@@ -3,4 +3,4 @@
POSTHOOK: query: CREATE TABLE alter_test (d STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@alter_test
-FAILED: Error in semantic analysis: DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead
+FAILED: Error in semantic analysis: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead
Index: ql/src/test/results/clientpositive/timestamp_3.q.out
===================================================================
--- ql/src/test/results/clientpositive/timestamp_3.q.out (revision 0)
+++ ql/src/test/results/clientpositive/timestamp_3.q.out (revision 0)
@@ -0,0 +1,117 @@
+PREHOOK: query: drop table timestamp_3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table timestamp_3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table timestamp_3 (t timestamp)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table timestamp_3 (t timestamp)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@timestamp_3
+PREHOOK: query: alter table timestamp_3 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: default@timestamp_3
+PREHOOK: Output: default@timestamp_3
+POSTHOOK: query: alter table timestamp_3 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@timestamp_3
+POSTHOOK: Output: default@timestamp_3
+PREHOOK: query: insert overwrite table timestamp_3
+ select cast(cast('1.3041352164485E9' as double) as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_3
+POSTHOOK: query: insert overwrite table timestamp_3
+ select cast(cast('1.3041352164485E9' as double) as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_3
+POSTHOOK: Lineage: timestamp_3.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from timestamp_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_3
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-15-42_712_2101699211172053884/-mr-10000
+POSTHOOK: query: select cast(t as boolean) from timestamp_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_3
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-15-42_712_2101699211172053884/-mr-10000
+POSTHOOK: Lineage: timestamp_3.t EXPRESSION []
+true
+PREHOOK: query: select cast(t as tinyint) from timestamp_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_3
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-15-45_765_7701431663402206845/-mr-10000
+POSTHOOK: query: select cast(t as tinyint) from timestamp_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_3
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-15-45_765_7701431663402206845/-mr-10000
+POSTHOOK: Lineage: timestamp_3.t EXPRESSION []
+48
+PREHOOK: query: select cast(t as smallint) from timestamp_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_3
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-15-48_782_8290139959636610814/-mr-10000
+POSTHOOK: query: select cast(t as smallint) from timestamp_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_3
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-15-48_782_8290139959636610814/-mr-10000
+POSTHOOK: Lineage: timestamp_3.t EXPRESSION []
+-31184
+PREHOOK: query: select cast(t as int) from timestamp_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_3
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-15-51_797_8902671576244167018/-mr-10000
+POSTHOOK: query: select cast(t as int) from timestamp_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_3
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-15-51_797_8902671576244167018/-mr-10000
+POSTHOOK: Lineage: timestamp_3.t EXPRESSION []
+1304135216
+PREHOOK: query: select cast(t as bigint) from timestamp_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_3
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-15-54_798_1817155502241746935/-mr-10000
+POSTHOOK: query: select cast(t as bigint) from timestamp_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_3
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-15-54_798_1817155502241746935/-mr-10000
+POSTHOOK: Lineage: timestamp_3.t EXPRESSION []
+1304135216
+PREHOOK: query: select cast(t as float) from timestamp_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_3
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-15-57_795_1565227465212020714/-mr-10000
+POSTHOOK: query: select cast(t as float) from timestamp_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_3
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-15-57_795_1565227465212020714/-mr-10000
+POSTHOOK: Lineage: timestamp_3.t EXPRESSION []
+1.30413517E9
+PREHOOK: query: select cast(t as double) from timestamp_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_3
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-16-00_781_1914812716492368483/-mr-10000
+POSTHOOK: query: select cast(t as double) from timestamp_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_3
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-16-00_781_1914812716492368483/-mr-10000
+POSTHOOK: Lineage: timestamp_3.t EXPRESSION []
+1.3041352164485E9
+PREHOOK: query: select cast(t as string) from timestamp_3 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_3
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-16-03_877_4595413304622996657/-mr-10000
+POSTHOOK: query: select cast(t as string) from timestamp_3 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_3
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-08_15-16-03_877_4595413304622996657/-mr-10000
+POSTHOOK: Lineage: timestamp_3.t EXPRESSION []
+2011-04-29 20:46:56.4485
+PREHOOK: query: drop table timestamp_3
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@timestamp_3
+PREHOOK: Output: default@timestamp_3
+POSTHOOK: query: drop table timestamp_3
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@timestamp_3
+POSTHOOK: Output: default@timestamp_3
+POSTHOOK: Lineage: timestamp_3.t EXPRESSION []
Index: ql/src/test/results/clientpositive/timestamp_udf.q.out
===================================================================
--- ql/src/test/results/clientpositive/timestamp_udf.q.out (revision 0)
+++ ql/src/test/results/clientpositive/timestamp_udf.q.out (revision 0)
@@ -0,0 +1,194 @@
+PREHOOK: query: drop table timestamp_udf
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table timestamp_udf
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table timestamp_udf_string
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table timestamp_udf_string
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table timestamp_udf (t timestamp)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table timestamp_udf (t timestamp)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@timestamp_udf
+PREHOOK: query: create table timestamp_udf_string (t string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table timestamp_udf_string (t string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@timestamp_udf_string
+PREHOOK: query: from src
+ insert overwrite table timestamp_udf
+ select '2011-05-06 07:08:09.1234567' limit 1
+ insert overwrite table timestamp_udf_string
+ select '2011-05-06 07:08:09.1234567' limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_udf
+PREHOOK: Output: default@timestamp_udf_string
+POSTHOOK: query: from src
+ insert overwrite table timestamp_udf
+ select '2011-05-06 07:08:09.1234567' limit 1
+ insert overwrite table timestamp_udf_string
+ select '2011-05-06 07:08:09.1234567' limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_udf
+POSTHOOK: Output: default@timestamp_udf_string
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+PREHOOK: query: -- Test UDFs with Timestamp input
+select unix_timestamp(t), year(t), month(t), day(t), dayofmonth(t),
+ weekofyear(t), hour(t), minute(t), second(t), to_date(t)
+ from timestamp_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-32-51_147_3505147910681598664/-mr-10000
+POSTHOOK: query: -- Test UDFs with Timestamp input
+select unix_timestamp(t), year(t), month(t), day(t), dayofmonth(t),
+ weekofyear(t), hour(t), minute(t), second(t), to_date(t)
+ from timestamp_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-32-51_147_3505147910681598664/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+1304690889 2011 5 6 6 18 7 8 9 2011-05-06
+PREHOOK: query: select date_add(t, 5), date_sub(t, 10)
+ from timestamp_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-32-54_431_5186616022126966413/-mr-10000
+POSTHOOK: query: select date_add(t, 5), date_sub(t, 10)
+ from timestamp_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-32-54_431_5186616022126966413/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+2011-05-11 2011-04-26
+PREHOOK: query: select datediff(t, t), datediff(t, '2002-03-21'), datediff('2002-03-21', t)
+ from timestamp_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-32-57_457_4248837007364128869/-mr-10000
+POSTHOOK: query: select datediff(t, t), datediff(t, '2002-03-21'), datediff('2002-03-21', t)
+ from timestamp_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-32-57_457_4248837007364128869/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+0 3333 -3333
+PREHOOK: query: select utc_to_localized_timestamp(t)
+ from timestamp_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-33-00_466_3138322265267448858/-mr-10000
+POSTHOOK: query: select utc_to_localized_timestamp(t)
+ from timestamp_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-33-00_466_3138322265267448858/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+2011-05-06 00:08:09.2464567
+PREHOOK: query: select utc_to_localized_timestamp(t, 'America/Chicago')
+ from timestamp_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-33-03_529_2704729645306887667/-mr-10000
+POSTHOOK: query: select utc_to_localized_timestamp(t, 'America/Chicago')
+ from timestamp_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-33-03_529_2704729645306887667/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+2011-05-06 02:08:09.2464567
+PREHOOK: query: -- Test UDFs with string input
+select unix_timestamp(t), year(t), month(t), day(t), dayofmonth(t),
+ weekofyear(t), hour(t), minute(t), second(t), to_date(t)
+ from timestamp_udf_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf_string
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-33-06_756_3826069893909399299/-mr-10000
+POSTHOOK: query: -- Test UDFs with string input
+select unix_timestamp(t), year(t), month(t), day(t), dayofmonth(t),
+ weekofyear(t), hour(t), minute(t), second(t), to_date(t)
+ from timestamp_udf_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf_string
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-33-06_756_3826069893909399299/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+1304690889 2011 5 6 6 18 7 8 9 2011-05-06
+PREHOOK: query: select date_add(t, 5), date_sub(t, 10) from timestamp_udf_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf_string
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-33-09_909_78606182962870810/-mr-10000
+POSTHOOK: query: select date_add(t, 5), date_sub(t, 10) from timestamp_udf_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf_string
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-33-09_909_78606182962870810/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+2011-05-11 2011-04-26
+PREHOOK: query: select datediff(t, t), datediff(t, '2002-03-21'), datediff('2002-03-21', t)
+ from timestamp_udf_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf_string
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-33-12_946_3464386838495090663/-mr-10000
+POSTHOOK: query: select datediff(t, t), datediff(t, '2002-03-21'), datediff('2002-03-21', t)
+ from timestamp_udf_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf_string
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-33-12_946_3464386838495090663/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+0 3333 -3333
+PREHOOK: query: select utc_to_localized_timestamp(t)
+ from timestamp_udf_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf_string
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-33-15_993_4865905195266952711/-mr-10000
+POSTHOOK: query: select utc_to_localized_timestamp(t)
+ from timestamp_udf_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf_string
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-33-15_993_4865905195266952711/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+2011-05-06 00:08:09.2464567
+PREHOOK: query: select utc_to_localized_timestamp(t, 'America/Chicago')
+ from timestamp_udf_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf_string
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-33-18_976_6207777001523793933/-mr-10000
+POSTHOOK: query: select utc_to_localized_timestamp(t, 'America/Chicago')
+ from timestamp_udf_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf_string
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-27_18-33-18_976_6207777001523793933/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+2011-05-06 02:08:09.2464567
+PREHOOK: query: drop table timestamp_udf
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@timestamp_udf
+PREHOOK: Output: default@timestamp_udf
+POSTHOOK: query: drop table timestamp_udf
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@timestamp_udf
+POSTHOOK: Output: default@timestamp_udf
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+PREHOOK: query: drop table timestamp_udf_string
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@timestamp_udf_string
+PREHOOK: Output: default@timestamp_udf_string
+POSTHOOK: query: drop table timestamp_udf_string
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@timestamp_udf_string
+POSTHOOK: Output: default@timestamp_udf_string
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
Index: ql/src/test/results/clientpositive/timestamp_comparison.q.out
===================================================================
--- ql/src/test/results/clientpositive/timestamp_comparison.q.out (revision 0)
+++ ql/src/test/results/clientpositive/timestamp_comparison.q.out (revision 0)
@@ -0,0 +1,99 @@
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) >
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-32_519_8364837448470167055/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) >
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-32_519_8364837448470167055/-mr-10000
+false
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-35_762_374402174619403002/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-35_762_374402174619403002/-mr-10000
+false
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) =
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-38_788_5635730866062139704/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) =
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-38_788_5635730866062139704/-mr-10000
+true
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <>
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-41_803_7173891637306783676/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <>
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-41_803_7173891637306783676/-mr-10000
+false
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) >=
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-44_748_5479207213327992908/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) >=
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-44_748_5479207213327992908/-mr-10000
+true
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <=
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-47_676_4858015920608435969/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <=
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-47_676_4858015920608435969/-mr-10000
+true
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) >=
+ cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-50_625_9121503583543935508/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) >=
+ cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-50_625_9121503583543935508/-mr-10000
+false
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <
+ cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-53_594_2460043482331880840/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <
+ cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-53_594_2460043482331880840/-mr-10000
+true
+PREHOOK: query: select cast('2011-05-06 07:08:09.1000' as timestamp) =
+ cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-56_524_8870776672820297370/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09.1000' as timestamp) =
+ cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-56_524_8870776672820297370/-mr-10000
+true
Index: ql/src/test/results/clientpositive/show_functions.q.out
===================================================================
--- ql/src/test/results/clientpositive/show_functions.q.out (revision 1151571)
+++ ql/src/test/results/clientpositive/show_functions.q.out (working copy)
@@ -140,6 +140,7 @@
substring
sum
tan
+timestamp
tinyint
to_date
trim
Index: ql/src/test/results/clientpositive/timestamp_1.q.out
===================================================================
--- ql/src/test/results/clientpositive/timestamp_1.q.out (revision 0)
+++ ql/src/test/results/clientpositive/timestamp_1.q.out (revision 0)
@@ -0,0 +1,712 @@
+PREHOOK: query: drop table timestamp_1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table timestamp_1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table timestamp_1 (t timestamp)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table timestamp_1 (t timestamp)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@timestamp_1
+PREHOOK: query: alter table timestamp_1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: default@timestamp_1
+POSTHOOK: query: alter table timestamp_1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: default@timestamp_1
+PREHOOK: query: insert overwrite table timestamp_1
+ select cast('2011-01-01 01:01:01' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_1
+POSTHOOK: query: insert overwrite table timestamp_1
+ select cast('2011-01-01 01:01:01' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_1
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-15_673_4408273775422868450/-mr-10000
+POSTHOOK: query: select cast(t as boolean) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-15_673_4408273775422868450/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+true
+PREHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-18_746_4932103921783133339/-mr-10000
+POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-18_746_4932103921783133339/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+77
+PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-21_844_2708388659244936027/-mr-10000
+POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-21_844_2708388659244936027/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+-4787
+PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-24_813_5716956451308382894/-mr-10000
+POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-24_813_5716956451308382894/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-27_803_3878179276965005826/-mr-10000
+POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-27_803_3878179276965005826/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-30_804_2580978481528947425/-mr-10000
+POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-30_804_2580978481528947425/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1.29387251E9
+PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-33_798_3197380634598936966/-mr-10000
+POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-33_798_3197380634598936966/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1.293872461E9
+PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-36_812_6243303470519170009/-mr-10000
+POSTHOOK: query: select cast(t as string) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-36_812_6243303470519170009/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+2011-01-01 01:01:01
+PREHOOK: query: insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01' from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_1
+POSTHOOK: query: insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01' from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_1
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-44_244_5130987512739326820/-mr-10000
+POSTHOOK: query: select cast(t as boolean) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-44_244_5130987512739326820/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+true
+PREHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-47_287_1896639484614005520/-mr-10000
+POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-47_287_1896639484614005520/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+77
+PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-50_264_7576388328084060202/-mr-10000
+POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-50_264_7576388328084060202/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+-4787
+PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-53_318_2662382568138024277/-mr-10000
+POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-53_318_2662382568138024277/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-56_346_1338506489498661596/-mr-10000
+POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-56_346_1338506489498661596/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-59_334_8696543905725057752/-mr-10000
+POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-45-59_334_8696543905725057752/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1.29387251E9
+PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-02_296_9027609210976973562/-mr-10000
+POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-02_296_9027609210976973562/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1.293872461E9
+PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-05_413_380961111206156155/-mr-10000
+POSTHOOK: query: select cast(t as string) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-05_413_380961111206156155/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+2011-01-01 01:01:01
+PREHOOK: query: insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01.1' from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_1
+POSTHOOK: query: insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01.1' from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_1
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-13_125_2829877051145525726/-mr-10000
+POSTHOOK: query: select cast(t as boolean) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-13_125_2829877051145525726/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+true
+PREHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-16_102_221424930441519001/-mr-10000
+POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-16_102_221424930441519001/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+77
+PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-19_100_3256033535416125006/-mr-10000
+POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-19_100_3256033535416125006/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+-4787
+PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-22_141_7327077506146237178/-mr-10000
+POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-22_141_7327077506146237178/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-25_118_9143022508800283596/-mr-10000
+POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-25_118_9143022508800283596/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-28_133_4899745479711547176/-mr-10000
+POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-28_133_4899745479711547176/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1.29387251E9
+PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-31_158_6071620138802537791/-mr-10000
+POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-31_158_6071620138802537791/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1.2938724611E9
+PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-34_077_8863957917848291181/-mr-10000
+POSTHOOK: query: select cast(t as string) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-34_077_8863957917848291181/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+2011-01-01 01:01:01.1
+PREHOOK: query: insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01.0001' from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_1
+POSTHOOK: query: insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01.0001' from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_1
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-42_422_5361315899348240189/-mr-10000
+POSTHOOK: query: select cast(t as boolean) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-42_422_5361315899348240189/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+true
+PREHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-45_399_7074034405805928504/-mr-10000
+POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-45_399_7074034405805928504/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+77
+PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-48_365_2263022985271121719/-mr-10000
+POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-48_365_2263022985271121719/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+-4787
+PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-51_436_4608699469856775030/-mr-10000
+POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-51_436_4608699469856775030/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-54_392_6106265740670014927/-mr-10000
+POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-54_392_6106265740670014927/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-57_367_8456247319189738989/-mr-10000
+POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-46-57_367_8456247319189738989/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1.29387251E9
+PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-00_366_7133266320643115499/-mr-10000
+POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-00_366_7133266320643115499/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1.2938724610001E9
+PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-03_370_1351556572087437830/-mr-10000
+POSTHOOK: query: select cast(t as string) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-03_370_1351556572087437830/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+2011-01-01 01:01:01.0001
+PREHOOK: query: insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01.000100000' from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_1
+POSTHOOK: query: insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01.000100000' from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_1
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-10_610_5835184626602663461/-mr-10000
+POSTHOOK: query: select cast(t as boolean) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-10_610_5835184626602663461/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+true
+PREHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-13_688_5426752860397638090/-mr-10000
+POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-13_688_5426752860397638090/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+77
+PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-16_966_6773207551250357174/-mr-10000
+POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-16_966_6773207551250357174/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+-4787
+PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-19_930_5956276359267603771/-mr-10000
+POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-19_930_5956276359267603771/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-22_978_248645095092367626/-mr-10000
+POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-22_978_248645095092367626/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-25_954_6895029739153598575/-mr-10000
+POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-25_954_6895029739153598575/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1.29387251E9
+PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-28_872_4006824349208780207/-mr-10000
+POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-28_872_4006824349208780207/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1.2938724610001E9
+PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-31_857_9124887287659879381/-mr-10000
+POSTHOOK: query: select cast(t as string) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-31_857_9124887287659879381/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+2011-01-01 01:01:01.0001
+PREHOOK: query: insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01.001000011' from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_1
+POSTHOOK: query: insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01.001000011' from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_1
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-39_097_2912851854986589105/-mr-10000
+POSTHOOK: query: select cast(t as boolean) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-39_097_2912851854986589105/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+true
+PREHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-42_183_6209542158732663786/-mr-10000
+POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-42_183_6209542158732663786/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+77
+PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-45_151_3041031375905365547/-mr-10000
+POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-45_151_3041031375905365547/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+-4787
+PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-48_115_378030164207951662/-mr-10000
+POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-48_115_378030164207951662/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-51_092_6580623353477276765/-mr-10000
+POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-51_092_6580623353477276765/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-54_446_1520269806265117369/-mr-10000
+POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-54_446_1520269806265117369/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1.29387251E9
+PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-57_416_9191166993746767552/-mr-10000
+POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-47-57_416_9191166993746767552/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+1.293872461001E9
+PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-00_377_2365300502030781927/-mr-10000
+POSTHOOK: query: select cast(t as string) from timestamp_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-00_377_2365300502030781927/-mr-10000
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+2011-01-01 01:01:01.001000011
+PREHOOK: query: drop table timestamp_1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@timestamp_1
+PREHOOK: Output: default@timestamp_1
+POSTHOOK: query: drop table timestamp_1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@timestamp_1
+POSTHOOK: Output: default@timestamp_1
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_1.t EXPRESSION []
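
The expected rows above fix the cast semantics for the new type: the integral casts report whole Unix epoch seconds (1293872461, i.e. 2011-01-01 01:01:01 at UTC-8, implying a Pacific-time default on the test host), tinyint and smallint wrap to the low 8 and 16 bits of that value (77 and -4787), and only the double and string casts retain fractional seconds. A minimal sketch of those conversions on plain java.sql.Timestamp — the helper names are illustrative, not Hive's, and the printed values assume the JVM default zone is Pacific time:

    import java.sql.Timestamp;

    public class TimestampCastSketch {
        // Whole seconds since the epoch; what the int/bigint casts report.
        // Positive (post-1970) timestamps assumed, so long division floors correctly.
        static long toEpochSeconds(Timestamp t) {
            return t.getTime() / 1000;
        }

        // Whole seconds plus the fractional nanoseconds; what the double cast reports.
        static double toEpochDouble(Timestamp t) {
            return t.getTime() / 1000 + t.getNanos() / 1e9;
        }

        public static void main(String[] args) {
            Timestamp t = Timestamp.valueOf("2011-01-01 01:01:01.0001");
            long secs = toEpochSeconds(t);
            System.out.println(secs);             // 1293872461, matching the int/bigint rows
            System.out.println((short) secs);     // -4787: low 16 bits, matching the smallint rows
            System.out.println((byte) secs);      // 77: low 8 bits, matching the tinyint rows
            System.out.println(toEpochDouble(t)); // 1.2938724610001E9, matching the double rows
        }
    }
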
Index: ql/src/test/results/clientpositive/timestamp_2.q.out
===================================================================
--- ql/src/test/results/clientpositive/timestamp_2.q.out (revision 0)
+++ ql/src/test/results/clientpositive/timestamp_2.q.out (revision 0)
@@ -0,0 +1,712 @@
+PREHOOK: query: drop table timestamp_2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table timestamp_2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table timestamp_2 (t timestamp)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table timestamp_2 (t timestamp)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@timestamp_2
+PREHOOK: query: alter table timestamp_2 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe'
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: default@timestamp_2
+POSTHOOK: query: alter table timestamp_2 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe'
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: default@timestamp_2
+PREHOOK: query: insert overwrite table timestamp_2
+ select cast('2011-01-01 01:01:01' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_2
+POSTHOOK: query: insert overwrite table timestamp_2
+ select cast('2011-01-01 01:01:01' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_2
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-09_254_1677780486046903577/-mr-10000
+POSTHOOK: query: select cast(t as boolean) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-09_254_1677780486046903577/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+true
+PREHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-12_174_8355246285977609781/-mr-10000
+POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-12_174_8355246285977609781/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+77
+PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-15_187_1474177769686631179/-mr-10000
+POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-15_187_1474177769686631179/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+-4787
+PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-18_135_6674553302810288601/-mr-10000
+POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-18_135_6674553302810288601/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-21_147_7439441759500528969/-mr-10000
+POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-21_147_7439441759500528969/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-24_157_8137342921467725858/-mr-10000
+POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-24_157_8137342921467725858/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1.29387251E9
+PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-27_115_7570510000112541754/-mr-10000
+POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-27_115_7570510000112541754/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1.293872461E9
+PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-30_149_7863201428865842908/-mr-10000
+POSTHOOK: query: select cast(t as string) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-30_149_7863201428865842908/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+2011-01-01 01:01:01
+PREHOOK: query: insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01' from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_2
+POSTHOOK: query: insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01' from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_2
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-37_326_6078939667797496082/-mr-10000
+POSTHOOK: query: select cast(t as boolean) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-37_326_6078939667797496082/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+true
+PREHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-40_421_5280108096226979833/-mr-10000
+POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-40_421_5280108096226979833/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+77
+PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-43_387_7406640498703670553/-mr-10000
+POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-43_387_7406640498703670553/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+-4787
+PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-46_302_8260175709422432937/-mr-10000
+POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-46_302_8260175709422432937/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-49_276_8483419191999330874/-mr-10000
+POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-49_276_8483419191999330874/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-52_296_2341700485252035168/-mr-10000
+POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-52_296_2341700485252035168/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1.29387251E9
+PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-55_268_9219554481193433449/-mr-10000
+POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-55_268_9219554481193433449/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1.293872461E9
+PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-58_231_8887436648539780808/-mr-10000
+POSTHOOK: query: select cast(t as string) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-48-58_231_8887436648539780808/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+2011-01-01 01:01:01
+PREHOOK: query: insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01.1' from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_2
+POSTHOOK: query: insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01.1' from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_2
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-05_930_3486792034390357603/-mr-10000
+POSTHOOK: query: select cast(t as boolean) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-05_930_3486792034390357603/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+true
+PREHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-08_943_7831453960584727401/-mr-10000
+POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-08_943_7831453960584727401/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+77
+PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-11_943_4793990075197886013/-mr-10000
+POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-11_943_4793990075197886013/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+-4787
+PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-14_870_3758505695282442742/-mr-10000
+POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-14_870_3758505695282442742/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-17_883_2914296100063525715/-mr-10000
+POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-17_883_2914296100063525715/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-20_934_3487178570616336367/-mr-10000
+POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-20_934_3487178570616336367/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1.29387251E9
+PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-23_902_4484213838609434018/-mr-10000
+POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-23_902_4484213838609434018/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1.2938724611E9
+PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-26_954_2336095754116315212/-mr-10000
+POSTHOOK: query: select cast(t as string) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-26_954_2336095754116315212/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+2011-01-01 01:01:01.1
+PREHOOK: query: insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01.0001' from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_2
+POSTHOOK: query: insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01.0001' from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_2
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-34_653_4781838027058443331/-mr-10000
+POSTHOOK: query: select cast(t as boolean) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-34_653_4781838027058443331/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+true
+PREHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-38_304_1317016439907140832/-mr-10000
+POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-38_304_1317016439907140832/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+77
+PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-41_275_9148893321464358248/-mr-10000
+POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-41_275_9148893321464358248/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+-4787
+PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-44_254_5017580943982042165/-mr-10000
+POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-44_254_5017580943982042165/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-47_208_6485422477749545271/-mr-10000
+POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-47_208_6485422477749545271/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-50_163_1281533183229086880/-mr-10000
+POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-50_163_1281533183229086880/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1.29387251E9
+PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-53_184_8143474264472938500/-mr-10000
+POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-53_184_8143474264472938500/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1.2938724610001E9
+PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-56_154_7984370185735682521/-mr-10000
+POSTHOOK: query: select cast(t as string) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-49-56_154_7984370185735682521/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+2011-01-01 01:01:01.0001
+PREHOOK: query: insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01.000100000' from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_2
+POSTHOOK: query: insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01.000100000' from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_2
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-03_569_5946080326619912392/-mr-10000
+POSTHOOK: query: select cast(t as boolean) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-03_569_5946080326619912392/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+true
+PREHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-06_591_5565829922065788266/-mr-10000
+POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-06_591_5565829922065788266/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+77
+PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-09_607_2753204269820264462/-mr-10000
+POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-09_607_2753204269820264462/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+-4787
+PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-13_240_3438652433892606992/-mr-10000
+POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-13_240_3438652433892606992/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-16_215_8956544347440517321/-mr-10000
+POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-16_215_8956544347440517321/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-19_172_5977493939798601065/-mr-10000
+POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-19_172_5977493939798601065/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1.29387251E9
+PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-22_179_704915874158342655/-mr-10000
+POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-22_179_704915874158342655/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1.2938724610001E9
+PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-25_227_2626758837651763278/-mr-10000
+POSTHOOK: query: select cast(t as string) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-25_227_2626758837651763278/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+2011-01-01 01:01:01.0001
+PREHOOK: query: insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01.001000011' from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_2
+POSTHOOK: query: insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01.001000011' from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_2
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-32_419_7429589015864428572/-mr-10000
+POSTHOOK: query: select cast(t as boolean) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-32_419_7429589015864428572/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+true
+PREHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-35_412_4900591732686437088/-mr-10000
+POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-35_412_4900591732686437088/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+77
+PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-38_318_138268242299761700/-mr-10000
+POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-38_318_138268242299761700/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+-4787
+PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-41_280_7066940416696071705/-mr-10000
+POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-41_280_7066940416696071705/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-44_251_8058715338324634255/-mr-10000
+POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-44_251_8058715338324634255/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1293872461
+PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-47_224_5898144204382843545/-mr-10000
+POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-47_224_5898144204382843545/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1.29387251E9
+PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-50_190_2151309129086457111/-mr-10000
+POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-50_190_2151309129086457111/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+1.293872461001E9
+PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-53_108_1539319794662532942/-mr-10000
+POSTHOOK: query: select cast(t as string) from timestamp_2 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-01_12-50-53_108_1539319794662532942/-mr-10000
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+2011-01-01 01:01:01.001000011
+PREHOOK: query: drop table timestamp_2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@timestamp_2
+PREHOOK: Output: default@timestamp_2
+POSTHOOK: query: drop table timestamp_2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@timestamp_2
+POSTHOOK: Output: default@timestamp_2
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_2.t EXPRESSION []
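
timestamp_2.q.out repeats the timestamp_1 cast matrix line for line with the storage switched to LazyBinarySerDe, so the pair pins down that cast results are serde-independent and that nanosecond precision survives a binary round trip. A self-contained sketch of such a round trip, using one plausible (seconds, nanos) layout rather than the patch's actual LazyBinary encoding:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.sql.Timestamp;

    public class TimestampRoundTripSketch {
        public static void main(String[] args) throws IOException {
            Timestamp in = Timestamp.valueOf("2011-01-01 01:01:01.001000011");

            // Encode as (seconds, nanos).
            ByteArrayOutputStream buf = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(buf);
            out.writeLong(in.getTime() / 1000);
            out.writeInt(in.getNanos());

            // Decode and reassemble.
            DataInputStream din =
                new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
            Timestamp back = new Timestamp(din.readLong() * 1000);
            back.setNanos(din.readInt());

            // Full nanosecond precision survives, which is why the string-cast rows
            // are identical in timestamp_1.q.out and timestamp_2.q.out.
            System.out.println(back); // 2011-01-01 01:01:01.001000011
        }
    }
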
Index: ql/src/test/queries/clientnegative/invalid_t_create3.q
===================================================================
--- ql/src/test/queries/clientnegative/invalid_t_create3.q (revision 1151571)
+++ ql/src/test/queries/clientnegative/invalid_t_create3.q (working copy)
@@ -1 +0,0 @@
-CREATE TABLE timestamp_test (d TIMESTAMP);
Index: ql/src/test/queries/clientpositive/timestamp_1.q
===================================================================
--- ql/src/test/queries/clientpositive/timestamp_1.q (revision 0)
+++ ql/src/test/queries/clientpositive/timestamp_1.q (revision 0)
@@ -0,0 +1,72 @@
+drop table timestamp_1;
+
+create table timestamp_1 (t timestamp);
+alter table timestamp_1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe';
+
+insert overwrite table timestamp_1
+ select cast('2011-01-01 01:01:01' as timestamp) from src limit 1;
+select cast(t as boolean) from timestamp_1 limit 1;
+select cast(t as tinyint) from timestamp_1 limit 1;
+select cast(t as smallint) from timestamp_1 limit 1;
+select cast(t as int) from timestamp_1 limit 1;
+select cast(t as bigint) from timestamp_1 limit 1;
+select cast(t as float) from timestamp_1 limit 1;
+select cast(t as double) from timestamp_1 limit 1;
+select cast(t as string) from timestamp_1 limit 1;
+
+insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01' from src limit 1;
+select cast(t as boolean) from timestamp_1 limit 1;
+select cast(t as tinyint) from timestamp_1 limit 1;
+select cast(t as smallint) from timestamp_1 limit 1;
+select cast(t as int) from timestamp_1 limit 1;
+select cast(t as bigint) from timestamp_1 limit 1;
+select cast(t as float) from timestamp_1 limit 1;
+select cast(t as double) from timestamp_1 limit 1;
+select cast(t as string) from timestamp_1 limit 1;
+
+insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01.1' from src limit 1;
+select cast(t as boolean) from timestamp_1 limit 1;
+select cast(t as tinyint) from timestamp_1 limit 1;
+select cast(t as smallint) from timestamp_1 limit 1;
+select cast(t as int) from timestamp_1 limit 1;
+select cast(t as bigint) from timestamp_1 limit 1;
+select cast(t as float) from timestamp_1 limit 1;
+select cast(t as double) from timestamp_1 limit 1;
+select cast(t as string) from timestamp_1 limit 1;
+
+insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01.0001' from src limit 1;
+select cast(t as boolean) from timestamp_1 limit 1;
+select cast(t as tinyint) from timestamp_1 limit 1;
+select cast(t as smallint) from timestamp_1 limit 1;
+select cast(t as int) from timestamp_1 limit 1;
+select cast(t as bigint) from timestamp_1 limit 1;
+select cast(t as float) from timestamp_1 limit 1;
+select cast(t as double) from timestamp_1 limit 1;
+select cast(t as string) from timestamp_1 limit 1;
+
+insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01.000100000' from src limit 1;
+select cast(t as boolean) from timestamp_1 limit 1;
+select cast(t as tinyint) from timestamp_1 limit 1;
+select cast(t as smallint) from timestamp_1 limit 1;
+select cast(t as int) from timestamp_1 limit 1;
+select cast(t as bigint) from timestamp_1 limit 1;
+select cast(t as float) from timestamp_1 limit 1;
+select cast(t as double) from timestamp_1 limit 1;
+select cast(t as string) from timestamp_1 limit 1;
+
+insert overwrite table timestamp_1
+ select '2011-01-01 01:01:01.001000011' from src limit 1;
+select cast(t as boolean) from timestamp_1 limit 1;
+select cast(t as tinyint) from timestamp_1 limit 1;
+select cast(t as smallint) from timestamp_1 limit 1;
+select cast(t as int) from timestamp_1 limit 1;
+select cast(t as bigint) from timestamp_1 limit 1;
+select cast(t as float) from timestamp_1 limit 1;
+select cast(t as double) from timestamp_1 limit 1;
+select cast(t as string) from timestamp_1 limit 1;
+
+drop table timestamp_1;
Index: ql/src/test/queries/clientpositive/timestamp_2.q
===================================================================
--- ql/src/test/queries/clientpositive/timestamp_2.q (revision 0)
+++ ql/src/test/queries/clientpositive/timestamp_2.q (revision 0)
@@ -0,0 +1,72 @@
+drop table timestamp_2;
+
+create table timestamp_2 (t timestamp);
+alter table timestamp_2 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe';
+
+insert overwrite table timestamp_2
+ select cast('2011-01-01 01:01:01' as timestamp) from src limit 1;
+select cast(t as boolean) from timestamp_2 limit 1;
+select cast(t as tinyint) from timestamp_2 limit 1;
+select cast(t as smallint) from timestamp_2 limit 1;
+select cast(t as int) from timestamp_2 limit 1;
+select cast(t as bigint) from timestamp_2 limit 1;
+select cast(t as float) from timestamp_2 limit 1;
+select cast(t as double) from timestamp_2 limit 1;
+select cast(t as string) from timestamp_2 limit 1;
+
+insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01' from src limit 1;
+select cast(t as boolean) from timestamp_2 limit 1;
+select cast(t as tinyint) from timestamp_2 limit 1;
+select cast(t as smallint) from timestamp_2 limit 1;
+select cast(t as int) from timestamp_2 limit 1;
+select cast(t as bigint) from timestamp_2 limit 1;
+select cast(t as float) from timestamp_2 limit 1;
+select cast(t as double) from timestamp_2 limit 1;
+select cast(t as string) from timestamp_2 limit 1;
+
+insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01.1' from src limit 1;
+select cast(t as boolean) from timestamp_2 limit 1;
+select cast(t as tinyint) from timestamp_2 limit 1;
+select cast(t as smallint) from timestamp_2 limit 1;
+select cast(t as int) from timestamp_2 limit 1;
+select cast(t as bigint) from timestamp_2 limit 1;
+select cast(t as float) from timestamp_2 limit 1;
+select cast(t as double) from timestamp_2 limit 1;
+select cast(t as string) from timestamp_2 limit 1;
+
+insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01.0001' from src limit 1;
+select cast(t as boolean) from timestamp_2 limit 1;
+select cast(t as tinyint) from timestamp_2 limit 1;
+select cast(t as smallint) from timestamp_2 limit 1;
+select cast(t as int) from timestamp_2 limit 1;
+select cast(t as bigint) from timestamp_2 limit 1;
+select cast(t as float) from timestamp_2 limit 1;
+select cast(t as double) from timestamp_2 limit 1;
+select cast(t as string) from timestamp_2 limit 1;
+
+insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01.000100000' from src limit 1;
+select cast(t as boolean) from timestamp_2 limit 1;
+select cast(t as tinyint) from timestamp_2 limit 1;
+select cast(t as smallint) from timestamp_2 limit 1;
+select cast(t as int) from timestamp_2 limit 1;
+select cast(t as bigint) from timestamp_2 limit 1;
+select cast(t as float) from timestamp_2 limit 1;
+select cast(t as double) from timestamp_2 limit 1;
+select cast(t as string) from timestamp_2 limit 1;
+
+insert overwrite table timestamp_2
+ select '2011-01-01 01:01:01.001000011' from src limit 1;
+select cast(t as boolean) from timestamp_2 limit 1;
+select cast(t as tinyint) from timestamp_2 limit 1;
+select cast(t as smallint) from timestamp_2 limit 1;
+select cast(t as int) from timestamp_2 limit 1;
+select cast(t as bigint) from timestamp_2 limit 1;
+select cast(t as float) from timestamp_2 limit 1;
+select cast(t as double) from timestamp_2 limit 1;
+select cast(t as string) from timestamp_2 limit 1;
+
+drop table timestamp_2;
Index: ql/src/test/queries/clientpositive/timestamp_3.q
===================================================================
--- ql/src/test/queries/clientpositive/timestamp_3.q (revision 0)
+++ ql/src/test/queries/clientpositive/timestamp_3.q (revision 0)
@@ -0,0 +1,17 @@
+drop table timestamp_3;
+
+create table timestamp_3 (t timestamp);
+alter table timestamp_3 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe';
+
+insert overwrite table timestamp_3
+ select cast(cast('1.3041352164485E9' as double) as timestamp) from src limit 1;
+select cast(t as boolean) from timestamp_3 limit 1;
+select cast(t as tinyint) from timestamp_3 limit 1;
+select cast(t as smallint) from timestamp_3 limit 1;
+select cast(t as int) from timestamp_3 limit 1;
+select cast(t as bigint) from timestamp_3 limit 1;
+select cast(t as float) from timestamp_3 limit 1;
+select cast(t as double) from timestamp_3 limit 1;
+select cast(t as string) from timestamp_3 limit 1;
+
+drop table timestamp_3;
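
timestamp_3.q covers the opposite direction: a double epoch value, 1.3041352164485E9 seconds, is cast to timestamp and then read back out through every primitive type. A sketch of the split into whole seconds plus nanoseconds that such a cast has to perform — an assumption about the behavior, since the authoritative code is GenericUDFTimestamp elsewhere in this patch:

    import java.sql.Timestamp;

    public class DoubleToTimestampSketch {
        // Split a seconds-valued double into whole seconds and nanoseconds.
        // (Fractions that round up to exactly 1e9 nanos are not handled here.)
        static Timestamp fromEpochDouble(double seconds) {
            long whole = (long) Math.floor(seconds);
            int nanos = (int) Math.round((seconds - whole) * 1e9);
            Timestamp t = new Timestamp(whole * 1000L);
            t.setNanos(nanos);
            return t;
        }

        public static void main(String[] args) {
            Timestamp t = fromEpochDouble(1.3041352164485E9);
            // Round-tripping back to double recovers the input value.
            System.out.println(t.getTime() / 1000 + t.getNanos() / 1e9); // 1.3041352164485E9
        }
    }
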
Index: ql/src/test/queries/clientpositive/timestamp_comparison.q
===================================================================
--- ql/src/test/queries/clientpositive/timestamp_comparison.q (revision 0)
+++ ql/src/test/queries/clientpositive/timestamp_comparison.q (revision 0)
@@ -0,0 +1,28 @@
+
+select cast('2011-05-06 07:08:09' as timestamp) >
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1;
+
+select cast('2011-05-06 07:08:09' as timestamp) <
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1;
+
+select cast('2011-05-06 07:08:09' as timestamp) =
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1;
+
+select cast('2011-05-06 07:08:09' as timestamp) <>
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1;
+
+select cast('2011-05-06 07:08:09' as timestamp) >=
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1;
+
+select cast('2011-05-06 07:08:09' as timestamp) <=
+ cast('2011-05-06 07:08:09' as timestamp) from src limit 1;
+
+select cast('2011-05-06 07:08:09' as timestamp) >=
+ cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1;
+
+select cast('2011-05-06 07:08:09' as timestamp) <
+ cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1;
+
+select cast('2011-05-06 07:08:09.1000' as timestamp) =
+ cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1;
+
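
The comparison cases pin down two ordering properties: fractional seconds participate in comparisons ('…07:08:09' sorts strictly below '…07:08:09.1'), and trailing zeros in the fraction are insignificant ('.1000' compares equal to '.1'). java.sql.Timestamp already behaves this way, so the expectation can be checked without any Hive classes:

    import java.sql.Timestamp;

    public class TimestampCompareSketch {
        public static void main(String[] args) {
            Timestamp a = Timestamp.valueOf("2011-05-06 07:08:09");
            Timestamp b = Timestamp.valueOf("2011-05-06 07:08:09.1");
            Timestamp c = Timestamp.valueOf("2011-05-06 07:08:09.1000");

            System.out.println(a.compareTo(b) < 0); // true: the fraction participates in ordering
            System.out.println(b.equals(c));        // true: trailing fraction zeros are insignificant
        }
    }
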
Index: ql/src/test/queries/clientpositive/timestamp_udf.q
===================================================================
--- ql/src/test/queries/clientpositive/timestamp_udf.q (revision 0)
+++ ql/src/test/queries/clientpositive/timestamp_udf.q (revision 0)
@@ -0,0 +1,46 @@
+drop table timestamp_udf;
+drop table timestamp_udf_string;
+
+create table timestamp_udf (t timestamp);
+create table timestamp_udf_string (t string);
+from src
+ insert overwrite table timestamp_udf
+ select '2011-05-06 07:08:09.1234567' limit 1
+ insert overwrite table timestamp_udf_string
+ select '2011-05-06 07:08:09.1234567' limit 1;
+
+-- Test UDFs with Timestamp input
+select unix_timestamp(t), year(t), month(t), day(t), dayofmonth(t),
+ weekofyear(t), hour(t), minute(t), second(t), to_date(t)
+ from timestamp_udf;
+
+select date_add(t, 5), date_sub(t, 10)
+ from timestamp_udf;
+
+select datediff(t, t), datediff(t, '2002-03-21'), datediff('2002-03-21', t)
+ from timestamp_udf;
+
+select utc_to_localized_timestamp(t)
+ from timestamp_udf;
+
+select utc_to_localized_timestamp(t, 'America/Chicago')
+ from timestamp_udf;
+
+-- Test UDFs with string input
+select unix_timestamp(t), year(t), month(t), day(t), dayofmonth(t),
+ weekofyear(t), hour(t), minute(t), second(t), to_date(t)
+ from timestamp_udf_string;
+
+select date_add(t, 5), date_sub(t, 10) from timestamp_udf_string;
+
+select datediff(t, t), datediff(t, '2002-03-21'), datediff('2002-03-21', t)
+ from timestamp_udf_string;
+
+select utc_to_localized_timestamp(t)
+ from timestamp_udf_string;
+
+select utc_to_localized_timestamp(t, 'America/Chicago')
+ from timestamp_udf_string;
+
+drop table timestamp_udf;
+drop table timestamp_udf_string;
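
timestamp_udf.q exercises both arities of the new utc_to_localized_timestamp UDF; with one argument the target zone presumably falls back to the configured default rather than being inferred. A sketch of the rendering step such a UDF needs — formatting one fixed instant in a caller-supplied zone — with helper names that are illustrative, not the UDF's internals:

    import java.sql.Timestamp;
    import java.text.SimpleDateFormat;
    import java.util.TimeZone;

    public class LocalizeSketch {
        // Render the given instant in the target zone; the instant itself is unchanged.
        // The fractional second is dropped here purely to keep the sketch short.
        static String localize(Timestamp utc, String zoneId) {
            SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            fmt.setTimeZone(TimeZone.getTimeZone(zoneId));
            return fmt.format(utc);
        }

        public static void main(String[] args) {
            // Parsed in the JVM default zone, so the printed wall-clock times shift
            // by the offset between that zone and the requested one.
            Timestamp t = Timestamp.valueOf("2011-05-06 07:08:09.1234567");
            System.out.println(localize(t, "America/Chicago")); // the test's explicit-zone case
            System.out.println(localize(t, "UTC"));             // any valid zone id works
        }
    }
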
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (working copy)
@@ -122,7 +122,6 @@
import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear;
import org.apache.hadoop.hive.ql.udf.UDFYear;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEWAHBitmap;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFBridge;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCollectSet;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFContextNGrams;
@@ -130,6 +129,7 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCount;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCovariance;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCovarianceSample;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEWAHBitmap;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFHistogramNumeric;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMax;
@@ -147,13 +147,13 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFArray;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFArrayContains;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapAnd;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapOr;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapEmpty;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCase;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCoalesce;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcatWS;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapAnd;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapEmpty;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapOr;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFElt;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFField;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash;
@@ -180,7 +180,9 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSplit;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFStringToMap;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFStruct;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUnion;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtcToLocalizedTimestamp;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode;
@@ -367,6 +369,9 @@
registerUDF(Constants.STRING_TYPE_NAME, UDFToString.class, false,
UDFToString.class.getSimpleName());
+ registerGenericUDF(Constants.TIMESTAMP_TYPE_NAME,
+ GenericUDFTimestamp.class);
+
// Aggregate functions
registerGenericUDAF("max", new GenericUDAFMax());
registerGenericUDAF("min", new GenericUDAFMin());
@@ -416,6 +421,7 @@
registerGenericUDF("concat_ws", GenericUDFConcatWS.class);
registerGenericUDF("array_contains", GenericUDFArrayContains.class);
registerGenericUDF("sentences", GenericUDFSentences.class);
+ registerGenericUDF("utc_to_localized_timestamp", GenericUDFUtcToLocalizedTimestamp.class);
// Generic UDTF's
registerGenericUDTF("explode", GenericUDTFExplode.class);
@@ -646,6 +652,11 @@
return true;
}
+ if (from.equals(TypeInfoFactory.timestampTypeInfo)
+ && to.equals(TypeInfoFactory.stringTypeInfo)) {
+ return true;
+ }
+
// Allow implicit conversion from Byte -> Integer -> Long -> Float -> Double
// -> String
Integer f = numericTypes.get(from);
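
The new branch is checked before the numeric widening chain, so a timestamp may decay to string implicitly without joining that chain. A simplified sketch of the resulting lookup order (this is not the Hive source; the type names are the SQL ones):

    import java.util.Arrays;
    import java.util.List;

    public class ImplicitConvertSketch {
      static final List<String> CHAIN = Arrays.asList(
          "tinyint", "smallint", "int", "bigint", "float", "double", "string");

      static boolean implicitConvertible(String from, String to) {
        if (from.equals(to)) {
          return true;
        }
        // Special case added by this patch: timestamp -> string.
        if (from.equals("timestamp") && to.equals("string")) {
          return true;
        }
        // Otherwise fall back to the numeric widening chain.
        int f = CHAIN.indexOf(from);
        int t = CHAIN.indexOf(to);
        return f != -1 && t != -1 && f <= t;
      }
    }
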
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (working copy)
@@ -458,6 +458,8 @@
Constants.DOUBLE_TYPE_NAME);
conversionFunctionTextHashMap.put(HiveParser.TOK_STRING,
Constants.STRING_TYPE_NAME);
+ conversionFunctionTextHashMap.put(HiveParser.TOK_TIMESTAMP,
+ Constants.TIMESTAMP_TYPE_NAME);
}
public static boolean isRedundantConversionFunction(ASTNode expr,
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (working copy)
@@ -151,8 +151,8 @@
+ "Please check your hive.input.format setting and make sure your Hadoop version support "
+ "CombineFileInputFormat"),
NONEXISTPARTCOL("Non-Partition column appears in the partition specification: "),
- UNSUPPORTED_TYPE("DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use "
- + "STRING instead"),
+ UNSUPPORTED_TYPE("DATE and DATETIME types aren't supported yet. Please use "
+ + "TIMESTAMP instead"),
CREATE_NON_NATIVE_AS("CREATE TABLE AS SELECT cannot be used for a non-native table"),
LOAD_INTO_NON_NATIVE("A non-native table cannot be used as target for LOAD"),
LOCKMGR_NOT_SPECIFIED("Lock manager not specified correctly, set hive.lock.manager"),
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (working copy)
@@ -27,6 +27,7 @@
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
+import java.util.TimeZone;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
@@ -53,6 +54,7 @@
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
@@ -224,6 +226,9 @@
idToTableNameMap = new HashMap<String, String>();
inputs = new LinkedHashSet<ReadEntity>();
outputs = new LinkedHashSet<WriteEntity>();
+
+ TimestampWritable.setDefaultTimeZone(TimeZone.getTimeZone(
+ conf.getVar(HiveConf.ConfVars.HIVE_TIME_DEFAULT_TIMEZONE)));
} catch (Exception e) {
throw new SemanticException(e);
}
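
One JDK behaviour worth knowing about this hand-off: TimeZone.getTimeZone() returns GMT for an unrecognized ID, so a typo in hive.time.default.timezone silently localizes to GMT instead of failing. A minimal demonstration:

    import java.util.TimeZone;

    public class DefaultTimeZoneDemo {
      public static void main(String[] args) {
        System.out.println(TimeZone.getTimeZone("America/Los_Angeles").getID());
        // America/Los_Angeles
        System.out.println(TimeZone.getTimeZone("America/LosAngeles").getID());
        // GMT (unknown IDs fall back silently)
      }
    }
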
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy)
@@ -26,7 +26,6 @@
import java.io.Serializable;
import java.net.URI;
-import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
@@ -34,9 +33,9 @@
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
-import java.util.Map.Entry;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
@@ -59,8 +58,8 @@
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.index.HiveIndex;
-import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
import org.apache.hadoop.hive.ql.index.HiveIndex.IndexType;
+import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -71,7 +70,9 @@
import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
+import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
@@ -106,8 +107,6 @@
import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
-import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
import org.apache.hadoop.hive.ql.security.authorization.Privilege;
import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry;
import org.apache.hadoop.hive.ql.session.SessionState;
@@ -140,9 +139,8 @@
}
public static String getTypeName(int token) throws SemanticException {
- // date, datetime, and timestamp types aren't currently supported
- if (token == HiveParser.TOK_DATE || token == HiveParser.TOK_DATETIME ||
- token == HiveParser.TOK_TIMESTAMP) {
+ // date and datetime types aren't currently supported
+ if (token == HiveParser.TOK_DATE || token == HiveParser.TOK_DATETIME) {
throw new SemanticException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
}
return TokenToTypeName.get(token);
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java (working copy)
@@ -22,6 +22,7 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.FloatWritable;
@@ -42,7 +43,7 @@
/**
* Convert from void to an integer. This is called for CAST(... AS INT)
- *
+ *
* @param i
* The void value to convert
* @return Integer
@@ -53,7 +54,7 @@
/**
* Convert from boolean to an integer. This is called for CAST(... AS INT)
- *
+ *
* @param i
* The boolean value to convert
* @return IntWritable
@@ -69,7 +70,7 @@
/**
* Convert from byte to an integer. This is called for CAST(... AS INT)
- *
+ *
* @param i
* The byte value to convert
* @return IntWritable
@@ -85,7 +86,7 @@
/**
* Convert from short to an integer. This is called for CAST(... AS INT)
- *
+ *
* @param i
* The short value to convert
* @return IntWritable
@@ -101,7 +102,7 @@
/**
* Convert from long to an integer. This is called for CAST(... AS INT)
- *
+ *
* @param i
* The long value to convert
* @return IntWritable
@@ -117,7 +118,7 @@
/**
* Convert from float to an integer. This is called for CAST(... AS INT)
- *
+ *
* @param i
* The float value to convert
* @return IntWritable
@@ -133,7 +134,7 @@
/**
* Convert from double to an integer. This is called for CAST(... AS INT)
- *
+ *
* @param i
* The double value to convert
* @return IntWritable
@@ -149,7 +150,7 @@
/**
* Convert from string to an integer. This is called for CAST(... AS INT)
- *
+ *
* @param i
* The string value to convert
* @return IntWritable
@@ -171,4 +172,20 @@
}
}
+ /**
+ * Convert from Timestamp to an integer. This is called for CAST(... AS INT)
+ *
+ * @param i
+ * The Timestamp value to convert
+ * @return IntWritable
+ */
+ public IntWritable evaluate(TimestampWritable i) {
+ if (i == null) {
+ return null;
+ } else {
+ intWritable.set(i.getSeconds());
+ return intWritable;
+ }
+ }
+
}
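
The numeric casts added here all reduce a timestamp to its epoch-second value and then narrow it like any other integral cast. A worked plain-JDK example:

    import java.sql.Timestamp;

    public class TimestampNumericCasts {
      public static void main(String[] args) {
        Timestamp t = Timestamp.valueOf("2002-03-21 01:23:45");
        long seconds = t.getTime() / 1000;  // CAST(t AS BIGINT)
        int asInt = (int) seconds;          // CAST(t AS INT)
        short asShort = (short) seconds;    // CAST(t AS SMALLINT), wraps
        byte asByte = (byte) seconds;       // CAST(t AS TINYINT), wraps
        System.out.printf("%d %d %d %d%n", seconds, asInt, asShort, asByte);
      }
    }
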
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java (working copy)
@@ -25,6 +25,7 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
@@ -32,8 +33,8 @@
* UDFYear.
*
*/
-@Description(name = "year",
- value = "_FUNC_(date) - Returns the year of date",
+@Description(name = "year",
+ value = "_FUNC_(date) - Returns the year of date",
extended = "date is a string in the format of 'yyyy-MM-dd HH:mm:ss' or "
+ "'yyyy-MM-dd'.\n"
+ "Example:\n "
@@ -49,7 +50,7 @@
/**
* Get the year from a date string.
- *
+ *
* @param dateString
* the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
* "yyyy-MM-dd".
@@ -72,4 +73,14 @@
}
}
+ public IntWritable evaluate(TimestampWritable t) {
+ if (t == null) {
+ return null;
+ }
+
+ calendar.setTime(t.getTimestamp());
+ result.set(calendar.get(Calendar.YEAR));
+ return result;
+ }
+
}
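
The same Calendar-based extraction pattern recurs in UDFWeekOfYear, UDFMinute, UDFSecond, UDFDayOfMonth, UDFHour and UDFMonth below. In isolation it looks like this (note that Calendar.MONTH is zero-based, hence the +1 in UDFMonth, and that a 24-hour value requires Calendar.HOUR_OF_DAY):

    import java.sql.Timestamp;
    import java.util.Calendar;

    public class CalendarFieldDemo {
      public static void main(String[] args) {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(Timestamp.valueOf("2002-03-21 17:23:45"));
        System.out.println(calendar.get(Calendar.YEAR));         // 2002
        System.out.println(calendar.get(Calendar.MONTH) + 1);    // 3
        System.out.println(calendar.get(Calendar.DAY_OF_MONTH)); // 21
        System.out.println(calendar.get(Calendar.HOUR_OF_DAY));  // 17
      }
    }
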
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java (working copy)
@@ -25,6 +25,7 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
@@ -32,9 +33,9 @@
* UDFWeekOfYear.
*
*/
-@Description(name = "yearweek",
+@Description(name = "yearweek",
value = "_FUNC_(date) - Returns the week of the year of the given date. A week "
- + "is considered to start on a Monday and week 1 is the first week with >3 days.",
+ + "is considered to start on a Monday and week 1 is the first week with >3 days.",
extended = "Examples:\n"
+ " > SELECT _FUNC_('2008-02-20') FROM src LIMIT 1;\n"
+ " 8\n"
@@ -52,7 +53,7 @@
/**
* Get the week of the year from a date string.
- *
+ *
* @param dateString
* the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
* "yyyy-MM-dd".
@@ -73,4 +74,14 @@
}
}
+ public IntWritable evaluate(TimestampWritable t) {
+ if (t == null) {
+ return null;
+ }
+
+ calendar.setTime(t.getTimestamp());
+ result.set(calendar.get(Calendar.WEEK_OF_YEAR));
+ return result;
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java (working copy)
@@ -22,6 +22,7 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazy.LazyLong;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.FloatWritable;
@@ -42,7 +43,7 @@
/**
* Convert from void to a long. This is called for CAST(... AS BIGINT)
- *
+ *
* @param i
* The void value to convert
* @return LongWritable
@@ -53,7 +54,7 @@
/**
* Convert from boolean to a long. This is called for CAST(... AS BIGINT)
- *
+ *
* @param i
* The boolean value to convert
* @return LongWritable
@@ -69,7 +70,7 @@
/**
* Convert from byte to a long. This is called for CAST(... AS BIGINT)
- *
+ *
* @param i
* The byte value to convert
* @return LongWritable
@@ -85,7 +86,7 @@
/**
* Convert from short to a long. This is called for CAST(... AS BIGINT)
- *
+ *
* @param i
* The short value to convert
* @return LongWritable
@@ -101,7 +102,7 @@
/**
* Convert from integer to a long. This is called for CAST(... AS BIGINT)
- *
+ *
* @param i
* The integer value to convert
* @return LongWritable
@@ -117,7 +118,7 @@
/**
* Convert from long to a long. This is called for CAST(... AS BIGINT)
- *
+ *
* @param i
* The long value to convert
* @return LongWritable
@@ -128,7 +129,7 @@
/**
* Convert from float to a long. This is called for CAST(... AS BIGINT)
- *
+ *
* @param i
* The float value to convert
* @return LongWritable
@@ -144,7 +145,7 @@
/**
* Convert from double to a long. This is called for CAST(... AS BIGINT)
- *
+ *
* @param i
* The double value to convert
* @return LongWritable
@@ -160,7 +161,7 @@
/**
* Convert from string to a long. This is called for CAST(... AS BIGINT)
- *
+ *
* @param i
* The string value to convert
* @return LongWritable
@@ -182,4 +183,13 @@
}
}
+ public LongWritable evaluate(TimestampWritable i) {
+ if (i == null) {
+ return null;
+ } else {
+ longWritable.set(i.getSeconds());
+ return longWritable;
+ }
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java (working copy)
@@ -22,6 +22,7 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazy.LazyByte;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.FloatWritable;
@@ -42,7 +43,7 @@
/**
* Convert from void to a byte. This is called for CAST(... AS TINYINT)
- *
+ *
* @param i
* The void value to convert
* @return Byte
@@ -53,7 +54,7 @@
/**
* Convert from boolean to a byte. This is called for CAST(... AS TINYINT)
- *
+ *
* @param i
* The boolean value to convert
* @return Byte
@@ -69,7 +70,7 @@
/**
* Convert from short to a byte. This is called for CAST(... AS TINYINT)
- *
+ *
* @param i
* The short value to convert
* @return Byte
@@ -85,7 +86,7 @@
/**
* Convert from integer to a byte. This is called for CAST(... AS TINYINT)
- *
+ *
* @param i
* The integer value to convert
* @return Byte
@@ -101,7 +102,7 @@
/**
* Convert from long to a byte. This is called for CAST(... AS TINYINT)
- *
+ *
* @param i
* The long value to convert
* @return Byte
@@ -117,7 +118,7 @@
/**
* Convert from float to a byte. This is called for CAST(... AS TINYINT)
- *
+ *
* @param i
* The float value to convert
* @return Byte
@@ -133,7 +134,7 @@
/**
* Convert from double to a byte. This is called for CAST(... AS TINYINT)
- *
+ *
* @param i
* The double value to convert
* @return Byte
@@ -149,7 +150,7 @@
/**
* Convert from string to a byte. This is called for CAST(... AS TINYINT)
- *
+ *
* @param i
* The string value to convert
* @return Byte
@@ -171,4 +172,13 @@
}
}
+ public ByteWritable evaluate(TimestampWritable i) {
+ if (i == null) {
+ return null;
+ } else {
+ byteWritable.set((byte)i.getSeconds());
+ return byteWritable;
+ }
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java (working copy)
@@ -22,13 +22,13 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
-
/**
* UDFToDouble.
*
@@ -41,7 +41,7 @@
/**
* Convert from void to a double. This is called for CAST(... AS DOUBLE)
- *
+ *
* @param i
* The void value to convert
* @return DoubleWritable
@@ -52,7 +52,7 @@
/**
* Convert from boolean to a double. This is called for CAST(... AS DOUBLE)
- *
+ *
* @param i
* The boolean value to convert
* @return DoubleWritable
@@ -68,7 +68,7 @@
/**
* Convert from boolean to a double. This is called for CAST(... AS DOUBLE)
- *
+ *
* @param i
* The byte value to convert
* @return DoubleWritable
@@ -84,7 +84,7 @@
/**
* Convert from short to a double. This is called for CAST(... AS DOUBLE)
- *
+ *
* @param i
* The short value to convert
* @return DoubleWritable
@@ -100,7 +100,7 @@
/**
* Convert from integer to a double. This is called for CAST(... AS DOUBLE)
- *
+ *
* @param i
* The integer value to convert
* @return DoubleWritable
@@ -116,7 +116,7 @@
/**
* Convert from long to a double. This is called for CAST(... AS DOUBLE)
- *
+ *
* @param i
* The long value to convert
* @return DoubleWritable
@@ -132,7 +132,7 @@
/**
* Convert from float to a double. This is called for CAST(... AS DOUBLE)
- *
+ *
* @param i
* The float value to convert
* @return DoubleWritable
@@ -148,7 +148,7 @@
/**
* Convert from string to a double. This is called for CAST(... AS DOUBLE)
- *
+ *
* @param i
* The string value to convert
* @return DoubleWritable
@@ -168,4 +168,19 @@
}
}
+ public DoubleWritable evaluate(TimestampWritable i) {
+ if (i == null) {
+ return null;
+ } else {
+ try {
+ doubleWritable.set(i.getDouble());
+ return doubleWritable;
+ } catch (NumberFormatException e) {
+ // MySQL returns 0 if the string is not a well-formed numeric value.
+ // But we decided to return NULL instead, which is more conservative.
+ return null;
+ }
+ }
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java (working copy)
@@ -24,6 +24,7 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.Text;
/**
@@ -45,7 +46,7 @@
/**
* Get the date part of a date time string.
- *
+ *
* @param dateString
* the date string in the format of "yyyy-MM-dd HH:mm:ss" or
* "yyyy-MM-dd".
@@ -66,4 +67,13 @@
}
}
+ public Text evaluate(TimestampWritable i) {
+ if (i == null) {
+ return null;
+ }
+
+ t.set(formatter.format(i.getTimestamp()));
+ return t;
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java (working copy)
@@ -25,6 +25,7 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
@@ -52,7 +53,7 @@
/**
* Get the minute from a date string.
- *
+ *
* @param dateString
* the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
* "yyyy-MM-dd".
@@ -80,4 +81,14 @@
}
}
+ public IntWritable evaluate(TimestampWritable t) {
+ if (t == null) {
+ return null;
+ }
+
+ calendar.setTime(t.getTimestamp());
+ result.set(calendar.get(Calendar.MINUTE));
+ return result;
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateDiff.java (working copy)
@@ -20,10 +20,12 @@
import java.text.ParseException;
import java.text.SimpleDateFormat;
+import java.util.Date;
import java.util.TimeZone;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
@@ -42,7 +44,7 @@
public class UDFDateDiff extends UDF {
private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
- private IntWritable result = new IntWritable();
+ private final IntWritable result = new IntWritable();
public UDFDateDiff() {
formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
@@ -52,7 +54,7 @@
* Calculate the difference in the number of days. The time part of the string
* will be ignored. If dateString1 is earlier than dateString2, then the
* result can be negative.
- *
+ *
* @param dateString1
* the date string in the format of "yyyy-MM-dd HH:mm:ss" or
* "yyyy-MM-dd".
@@ -62,22 +64,54 @@
* @return the difference in days.
*/
public IntWritable evaluate(Text dateString1, Text dateString2) {
+ return evaluate(toDate(dateString1), toDate(dateString2));
+ }
- if (dateString1 == null || dateString2 == null) {
+ public IntWritable evaluate(TimestampWritable t1, TimestampWritable t2) {
+ return evaluate(toDate(t1), toDate(t2));
+ }
+
+ public IntWritable evaluate(TimestampWritable t, Text dateString) {
+ return evaluate(toDate(t), toDate(dateString));
+ }
+
+ public IntWritable evaluate(Text dateString, TimestampWritable t) {
+ return evaluate(toDate(dateString), toDate(t));
+ }
+
+ private IntWritable evaluate(Date date, Date date2) {
+ if (date == null || date2 == null) {
return null;
}
+ // NOTE: This implementation avoids the extra-second problem
+ // by comparing with UTC epoch and integer division.
+ // 86400 is the number of seconds in a day
+ long diffInMilliSeconds = date.getTime() - date2.getTime();
+ result.set((int) (diffInMilliSeconds / (86400 * 1000)));
+ return result;
+ }
+
+ private Date parseDate(String dateString) {
try {
- // NOTE: This implementation avoids the extra-second problem
- // by comparing with UTC epoch and integer division.
- long diffInMilliSeconds = (formatter.parse(dateString1.toString())
- .getTime() - formatter.parse(dateString2.toString()).getTime());
- // 86400 is the number of seconds in a day
- result.set((int) (diffInMilliSeconds / (86400 * 1000)));
- return result;
+ return formatter.parse(dateString);
} catch (ParseException e) {
return null;
}
}
+ private Date toDate(Text dateString) {
+ if (dateString == null) {
+ return null;
+ }
+ return parseDate(dateString.toString());
+ }
+
+ private Date toDate(TimestampWritable t) {
+ if (t == null) {
+ return null;
+ }
+ return t.getTimestamp();
+ }
+
}
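
All four public overloads now funnel into one Date-based difference: subtract the two UTC millisecond values and integer-divide by 86,400,000. A worked example of that arithmetic, including the sign behaviour:

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    public class DateDiffDemo {
      public static void main(String[] args) throws ParseException {
        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
        f.setTimeZone(TimeZone.getTimeZone("UTC"));
        Date d1 = f.parse("2002-03-21");
        Date d2 = f.parse("2002-03-19");
        long diffMillis = d1.getTime() - d2.getTime();
        System.out.println(diffMillis / (86400L * 1000));  // 2
        System.out.println(-diffMillis / (86400L * 1000)); // -2: order matters
      }
    }
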
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java (working copy)
@@ -25,6 +25,7 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
@@ -53,7 +54,7 @@
/**
* Get the minute from a date string.
- *
+ *
* @param dateString
* the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
* "yyyy-MM-dd".
@@ -81,4 +82,14 @@
}
}
+ public IntWritable evaluate(TimestampWritable t) {
+ if (t == null) {
+ return null;
+ }
+
+ calendar.setTime(t.getTimestamp());
+ result.set(calendar.get(Calendar.SECOND));
+ return result;
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java (working copy)
@@ -22,6 +22,7 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
@@ -41,7 +42,7 @@
/**
* Convert a void to boolean. This is called for CAST(... AS BOOLEAN)
- *
+ *
* @param i
* The value of a void type
* @return BooleanWritable
@@ -52,7 +53,7 @@
/**
* Convert from a byte to boolean. This is called for CAST(... AS BOOLEAN)
- *
+ *
* @param i
* The byte value to convert
* @return BooleanWritable
@@ -68,7 +69,7 @@
/**
* Convert from a short to boolean. This is called for CAST(... AS BOOLEAN)
- *
+ *
* @param i
* The short value to convert
* @return BooleanWritable
@@ -84,7 +85,7 @@
/**
* Convert from a integer to boolean. This is called for CAST(... AS BOOLEAN)
- *
+ *
* @param i
* The integer value to convert
* @return BooleanWritable
@@ -100,7 +101,7 @@
/**
* Convert from a long to boolean. This is called for CAST(... AS BOOLEAN)
- *
+ *
* @param i
* The long value to convert
* @return BooleanWritable
@@ -116,7 +117,7 @@
/**
* Convert from a float to boolean. This is called for CAST(... AS BOOLEAN)
- *
+ *
* @param i
* The float value to convert
* @return BooleanWritable
@@ -132,7 +133,7 @@
/**
* Convert from a double to boolean. This is called for CAST(... AS BOOLEAN)
- *
+ *
* @param i
* The double value to convert
* @return BooleanWritable
@@ -148,7 +149,7 @@
/**
* Convert from a string to boolean. This is called for CAST(... AS BOOLEAN)
- *
+ *
* @param i
* The string value to convert
* @return BooleanWritable
@@ -162,4 +163,13 @@
}
}
+ public BooleanWritable evaluate(TimestampWritable i) {
+ if (i == null) {
+ return null;
+ } else {
+ booleanWritable.set(i.getSeconds() != 0 || i.getNanos() != 0);
+ return booleanWritable;
+ }
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java (working copy)
@@ -25,6 +25,7 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
@@ -52,11 +53,11 @@
/**
* Subtract a number of days to the date. The time part of the string will be
* ignored.
- *
+ *
* NOTE: This is a subset of what MySQL offers as:
* http://dev.mysql.com/doc/refman
* /5.1/en/date-and-time-functions.html#function_date-sub
- *
+ *
* @param dateString1
* the date string in the format of "yyyy-MM-dd HH:mm:ss" or
* "yyyy-MM-dd".
@@ -81,4 +82,15 @@
}
}
+ public Text evaluate(TimestampWritable t, IntWritable days) {
+ if (t == null || days == null) {
+ return null;
+ }
+ calendar.setTime(t.getTimestamp());
+ calendar.add(Calendar.DAY_OF_MONTH, -days.get());
+ Date newDate = calendar.getTime();
+ result.set(formatter.format(newDate));
+ return result;
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java (revision 0)
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+ *
+ * GenericUDFTimestamp
+ *
+ * Example usage:
+ * ... CAST( as TIMESTAMP) ...
+ *
+ * Creates a TimestampWritable object using PrimitiveObjectInspectorConverter
+ *
+ */
+public class GenericUDFTimestamp extends GenericUDF {
+
+ private PrimitiveObjectInspector argumentOI;
+ private TimestampConverter tc;
+
+ @Override
+ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+ if (arguments.length != 1) {
+ throw new UDFArgumentLengthException(
+ "The function TIMESTAMP requires exactly one argument, got "
+ + arguments.length);
+ }
+ try {
+ argumentOI = (PrimitiveObjectInspector) arguments[0];
+ } catch (ClassCastException e) {
+ throw new UDFArgumentException(
+ "The function TIMESTAMP takes only primitive types");
+ }
+
+ tc = new TimestampConverter(argumentOI,
+ PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
+ return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
+ }
+
+ @Override
+ public Object evaluate(DeferredObject[] arguments) throws HiveException {
+ Object o0 = arguments[0].get();
+ if (o0 == null) {
+ return null;
+ }
+
+ return tc.convert(o0);
+ }
+
+ @Override
+ public String getDisplayString(String[] children) {
+ assert (children.length == 1);
+ StringBuilder sb = new StringBuilder();
+ sb.append("CAST ");
+ sb.append(children[0]);
+ sb.append(" AS TIMESTAMP");
+ return sb.toString();
+ }
+
+}
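
For a string operand, the TimestampConverter path amounts to java.sql.Timestamp parsing of the JDBC escape format (the exact rules live in PrimitiveObjectInspectorConverter, so treat this as an approximation):

    import java.sql.Timestamp;

    public class CastToTimestampDemo {
      public static void main(String[] args) {
        // CAST('2002-03-21 01:23:45.123' AS TIMESTAMP), roughly:
        Timestamp t = Timestamp.valueOf("2002-03-21 01:23:45.123");
        System.out.println(t);            // 2002-03-21 01:23:45.123
        System.out.println(t.getNanos()); // 123000000
      }
    }
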
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java (working copy)
@@ -60,6 +60,7 @@
case SHORT:
case INT:
case LONG:
+ case TIMESTAMP:
return new GenericUDAFSumLong();
case FLOAT:
case DOUBLE:
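
The same one-line dispatch change recurs through the UDAF resolvers below: TIMESTAMP joins the numeric primitive categories, so for example sum(t) resolves to the long evaluator and aggregates epoch seconds. A sketch of the dispatch (illustrative, not the Hive source):

    public class SumResolverSketch {
      enum Category { BYTE, SHORT, INT, LONG, TIMESTAMP, FLOAT, DOUBLE, STRING, BOOLEAN }

      static String resolve(Category c) {
        switch (c) {
          case BYTE: case SHORT: case INT: case LONG: case TIMESTAMP:
            return "GenericUDAFSumLong";   // timestamps sum as epoch seconds
          case FLOAT: case DOUBLE: case STRING:
            return "GenericUDAFSumDouble";
          default:
            throw new IllegalArgumentException("type not supported: " + c);
        }
      }
    }
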
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java (working copy)
@@ -55,6 +55,7 @@
case FLOAT:
case DOUBLE:
case STRING:
+ case TIMESTAMP:
return new GenericUDAFVarianceSampleEvaluator();
case BOOLEAN:
default:
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java (working copy)
@@ -55,6 +55,7 @@
case FLOAT:
case DOUBLE:
case STRING:
+ case TIMESTAMP:
return new GenericUDAFStdEvaluator();
case BOOLEAN:
default:
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtcToLocalizedTimestamp.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtcToLocalizedTimestamp.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtcToLocalizedTimestamp.java (revision 0)
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.sql.Timestamp;
+import java.util.TimeZone;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TextConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.Text;
+
+public class GenericUDFUtcToLocalizedTimestamp extends GenericUDF {
+
+ private PrimitiveObjectInspector[] argumentOIs;
+ private TimestampConverter timestampConverter;
+ private TextConverter textConverter;
+
+ @Override
+ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+ if (arguments.length < 1 || arguments.length > 2) {
+ throw new UDFArgumentLengthException(
+ "The function utc_to_localized_timestamp requires one or two "
+ + "arguments, got " + arguments.length);
+ }
+ try {
+ argumentOIs = new PrimitiveObjectInspector[2];
+ argumentOIs[0] = (PrimitiveObjectInspector) arguments[0];
+ if (arguments.length > 1) {
+ argumentOIs[1] = (PrimitiveObjectInspector) arguments[1];
+ }
+ } catch (ClassCastException e) {
+ throw new UDFArgumentException(
+ "The function utc_to_localized_timestamp takes only primitive types");
+ }
+
+ timestampConverter = new TimestampConverter(argumentOIs[0],
+ PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
+ if (argumentOIs[1] != null) {
+ textConverter = new TextConverter(argumentOIs[1]);
+ }
+ return PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
+ }
+
+ @Override
+ public Object evaluate(DeferredObject[] arguments) throws HiveException {
+ Object o0 = arguments[0].get();
+ TimeZone timezone = null;
+ if (o0 == null) {
+ return null;
+ }
+
+ if (arguments.length > 1 && arguments[1] != null) {
+ Text text = textConverter.convert(arguments[1].get());
+ if (text != null) {
+ timezone = TimeZone.getTimeZone(text.toString());
+ }
+ }
+
+ if (timezone == null) {
+ // Just use the default
+ timezone = TimestampWritable.getDefaultTimeZone();
+ }
+
+ Timestamp timestamp = ((TimestampWritable) timestampConverter.convert(o0))
+ .getTimestamp();
+
+ int offset = timezone.getOffset(timestamp.getTime());
+ // The offset is a whole number of milliseconds, so only the
+ // sub-millisecond part of the nanos field must survive setTime(),
+ // which resets nanos to the new millisecond component.
+ int subMilliNanos = timestamp.getNanos() % 1000000;
+ timestamp.setTime(timestamp.getTime() + offset);
+ timestamp.setNanos(timestamp.getNanos() + subMilliNanos);
+
+ return timestamp;
+ }
+
+ @Override
+ public String getDisplayString(String[] children) {
+ StringBuilder sb = new StringBuilder();
+ sb.append("Localized ");
+ sb.append(children[0]);
+ sb.append(" UTC using timezone: ");
+ if (children.length > 1) {
+ sb.append(children[1]);
+ } else {
+ sb.append("(default)");
+ }
+ return sb.toString();
+ }
+}
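
The localization itself is offset addition at the instant being converted, so daylight saving is honoured per value. A worked plain-JDK example of the offset lookup:

    import java.sql.Timestamp;
    import java.util.TimeZone;

    public class UtcToLocalizedDemo {
      public static void main(String[] args) {
        TimeZone chicago = TimeZone.getTimeZone("America/Chicago");
        Timestamp winter = Timestamp.valueOf("2002-01-15 12:00:00");
        Timestamp summer = Timestamp.valueOf("2002-07-15 12:00:00");
        System.out.println(chicago.getOffset(winter.getTime()) / 3600000); // -6 (CST)
        System.out.println(chicago.getOffset(summer.getTime()) / 3600000); // -5 (CDT)
      }
    }
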
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java (working copy)
@@ -91,6 +91,7 @@
case LONG:
case FLOAT:
case DOUBLE:
+ case TIMESTAMP:
break;
default:
throw new UDFArgumentTypeException(0,
@@ -155,6 +156,7 @@
case SHORT:
case INT:
case LONG:
+ case TIMESTAMP:
break;
default:
throw new UDFArgumentTypeException(2, "Only an integer argument is accepted as "
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java (working copy)
@@ -71,6 +71,7 @@
case FLOAT:
case DOUBLE:
case STRING:
+ case TIMESTAMP:
return new GenericUDAFVarianceEvaluator();
case BOOLEAN:
default:
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java (working copy)
@@ -70,6 +70,7 @@
case FLOAT:
case DOUBLE:
case STRING:
+ case TIMESTAMP:
return new GenericUDAFAverageEvaluator();
case BOOLEAN:
default:
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java (working copy)
@@ -66,6 +66,7 @@
case LONG:
case FLOAT:
case DOUBLE:
+ case TIMESTAMP:
switch (((PrimitiveTypeInfo) parameters[1]).getPrimitiveCategory()) {
case BYTE:
case SHORT:
@@ -73,6 +74,7 @@
case LONG:
case FLOAT:
case DOUBLE:
+ case TIMESTAMP:
return new GenericUDAFCovarianceSampleEvaluator();
case STRING:
case BOOLEAN:
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java (working copy)
@@ -86,6 +86,7 @@
case LONG:
case FLOAT:
case DOUBLE:
+ case TIMESTAMP:
break;
case STRING:
case BOOLEAN:
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java (working copy)
@@ -101,6 +101,7 @@
case LONG:
case FLOAT:
case DOUBLE:
+ case TIMESTAMP:
switch (((PrimitiveTypeInfo) parameters[1]).getPrimitiveCategory()) {
case BYTE:
case SHORT:
@@ -108,6 +109,7 @@
case LONG:
case FLOAT:
case DOUBLE:
+ case TIMESTAMP:
return new GenericUDAFCorrelationEvaluator();
case STRING:
case BOOLEAN:
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java (working copy)
@@ -132,6 +132,7 @@
case SHORT:
case INT:
case LONG:
+ case TIMESTAMP:
break;
default:
@@ -151,6 +152,7 @@
case SHORT:
case INT:
case LONG:
+ case TIMESTAMP:
break;
default:
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java (working copy)
@@ -92,6 +92,7 @@
case LONG:
case FLOAT:
case DOUBLE:
+ case TIMESTAMP:
switch (((PrimitiveTypeInfo) parameters[1]).getPrimitiveCategory()) {
case BYTE:
case SHORT:
@@ -99,6 +100,7 @@
case LONG:
case FLOAT:
case DOUBLE:
+ case TIMESTAMP:
return new GenericUDAFCovarianceEvaluator();
case STRING:
case BOOLEAN:
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java (working copy)
@@ -113,6 +113,7 @@
case SHORT:
case INT:
case LONG:
+ case TIMESTAMP:
break;
default:
@@ -130,6 +131,7 @@
case SHORT:
case INT:
case LONG:
+ case TIMESTAMP:
break;
default:
@@ -148,6 +150,7 @@
case SHORT:
case INT:
case LONG:
+ case TIMESTAMP:
break;
default:
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java (working copy)
@@ -54,6 +54,7 @@
case FLOAT:
case DOUBLE:
case STRING:
+ case TIMESTAMP:
return new GenericUDAFStdSampleEvaluator();
case BOOLEAN:
default:
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java (working copy)
@@ -22,6 +22,7 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazy.LazyShort;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.FloatWritable;
@@ -42,7 +43,7 @@
/**
* Convert from void to a short. This is called for CAST(... AS SMALLINT)
- *
+ *
* @param i
* The void value to convert
* @return ShortWritable
@@ -53,7 +54,7 @@
/**
* Convert from boolean to a short. This is called for CAST(... AS SMALLINT)
- *
+ *
* @param i
* The boolean value to convert
* @return ShortWritable
@@ -69,7 +70,7 @@
/**
* Convert from byte to a short. This is called for CAST(... AS SMALLINT)
- *
+ *
* @param i
* The byte value to convert
* @return ShortWritable
@@ -85,7 +86,7 @@
/**
* Convert from integer to a short. This is called for CAST(... AS SMALLINT)
- *
+ *
* @param i
* The integer value to convert
* @return ShortWritable
@@ -101,7 +102,7 @@
/**
* Convert from long to a short. This is called for CAST(... AS SMALLINT)
- *
+ *
* @param i
* The long value to convert
* @return ShortWritable
@@ -117,7 +118,7 @@
/**
* Convert from float to a short. This is called for CAST(... AS SMALLINT)
- *
+ *
* @param i
* The float value to convert
* @return ShortWritable
@@ -133,7 +134,7 @@
/**
* Convert from double to a short. This is called for CAST(... AS SMALLINT)
- *
+ *
* @param i
* The double value to convert
* @return ShortWritable
@@ -149,7 +150,7 @@
/**
* Convert from string to a short. This is called for CAST(... AS SMALLINT)
- *
+ *
* @param i
* The string value to convert
* @return ShortWritable
@@ -170,4 +171,14 @@
}
}
}
+
+ public ShortWritable evaluate(TimestampWritable i) {
+ if (i == null) {
+ return null;
+ } else {
+ shortWritable.set((short) i.getSeconds());
+ return shortWritable;
+ }
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnixTimeStamp.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnixTimeStamp.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnixTimeStamp.java (working copy)
@@ -24,6 +24,7 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
@@ -48,7 +49,7 @@
/**
* Return current UnixTime.
- *
+ *
* @return long Number of seconds from 1970-01-01 00:00:00
*/
public LongWritable evaluate() {
@@ -59,7 +60,7 @@
/**
* Convert time string to UnixTime.
- *
+ *
* @param dateText
* Time string in format yyyy-MM-dd HH:mm:ss
* @return long Number of seconds from 1970-01-01 00:00:00
@@ -82,7 +83,7 @@
/**
* Convert time string to UnixTime with user defined pattern.
- *
+ *
* @param dateText
* Time string in format patternstring
* @param patternText
@@ -104,4 +105,13 @@
return evaluate(dateText);
}
+
+ public LongWritable evaluate(TimestampWritable i) {
+ if (i == null) {
+ return null;
+ } else {
+ result.set(i.getSeconds());
+ return result;
+ }
+ }
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java (working copy)
@@ -25,6 +25,7 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
@@ -49,7 +50,7 @@
/**
* Get the day of month from a date string.
- *
+ *
* @param dateString
* the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
* "yyyy-MM-dd".
@@ -72,4 +73,14 @@
}
}
+ public IntWritable evaluate(TimestampWritable t) {
+ if (t == null) {
+ return null;
+ }
+
+ calendar.setTime(t.getTimestamp());
+ result.set(calendar.get(Calendar.DAY_OF_MONTH));
+ return result;
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java (working copy)
@@ -22,6 +22,7 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
@@ -41,7 +42,7 @@
/**
* Convert from void to a float. This is called for CAST(... AS FLOAT)
- *
+ *
* @param i
* The void value to convert
* @return FloatWritable
@@ -52,7 +53,7 @@
/**
* Convert from boolean to a float. This is called for CAST(... AS FLOAT)
- *
+ *
* @param i
* The boolean value to convert
* @return FloatWritable
@@ -68,7 +69,7 @@
/**
* Convert from byte to a float. This is called for CAST(... AS FLOAT)
- *
+ *
* @param i
* The byte value to convert
* @return FloatWritable
@@ -84,7 +85,7 @@
/**
* Convert from short to a float. This is called for CAST(... AS FLOAT)
- *
+ *
* @param i
* The short value to convert
* @return FloatWritable
@@ -100,7 +101,7 @@
/**
* Convert from integer to a float. This is called for CAST(... AS FLOAT)
- *
+ *
* @param i
* The integer value to convert
* @return FloatWritable
@@ -116,7 +117,7 @@
/**
* Convert from long to a float. This is called for CAST(... AS FLOAT)
- *
+ *
* @param i
* The long value to convert
* @return FloatWritable
@@ -132,7 +133,7 @@
/**
* Convert from double to a float. This is called for CAST(... AS FLOAT)
- *
+ *
* @param i
* The double value to convert
* @return FloatWritable
@@ -148,7 +149,7 @@
/**
* Convert from string to a float. This is called for CAST(... AS FLOAT)
- *
+ *
* @param i
* The string value to convert
* @return FloatWritable
@@ -168,4 +169,19 @@
}
}
+ public FloatWritable evaluate(TimestampWritable i) {
+ if (i == null) {
+ return null;
+ } else {
+ try {
+ floatWritable.set((float) i.getDouble());
+ return floatWritable;
+ } catch (NumberFormatException e) {
+ // MySQL returns 0 if the string is not a well-formed numeric value.
+ // But we decided to return NULL instead, which is more conservative.
+ return null;
+ }
+ }
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java (working copy)
@@ -25,6 +25,7 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
@@ -52,7 +53,7 @@
/**
* Get the hour from a date string.
- *
+ *
* @param dateString
* the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
* "yyyy-MM-dd".
@@ -80,4 +81,14 @@
}
}
+ public IntWritable evaluate(TimestampWritable t) {
+ if (t == null) {
+ return null;
+ }
+
+ calendar.setTime(t.getTimestamp());
+ result.set(calendar.get(Calendar.HOUR_OF_DAY));
+ return result;
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java (working copy)
@@ -25,6 +25,7 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
@@ -47,7 +48,7 @@
/**
* Get the month from a date string.
- *
+ *
* @param dateString
* the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
* "yyyy-MM-dd".
@@ -68,4 +69,14 @@
}
}
+ public IntWritable evaluate(TimestampWritable t) {
+ if (t == null) {
+ return null;
+ }
+
+ calendar.setTime(t.getTimestamp());
+ result.set(1 + calendar.get(Calendar.MONTH));
+ return result;
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java (working copy)
@@ -25,6 +25,7 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
@@ -52,11 +53,11 @@
/**
* Add a number of days to the date. The time part of the string will be
* ignored.
- *
+ *
* NOTE: This is a subset of what MySQL offers as:
* http://dev.mysql.com/doc/refman
* /5.1/en/date-and-time-functions.html#function_date-add
- *
+ *
* @param dateString1
* the date string in the format of "yyyy-MM-dd HH:mm:ss" or
* "yyyy-MM-dd".
@@ -81,4 +82,15 @@
}
}
+ public Text evaluate(TimestampWritable t, IntWritable days) {
+ if (t == null || days == null) {
+ return null;
+ }
+ calendar.setTime(t.getTimestamp());
+ calendar.add(Calendar.DAY_OF_MONTH, days.get());
+ Date newDate = calendar.getTime();
+ result.set(formatter.format(newDate));
+ return result;
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java (revision 1151571)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java (working copy)
@@ -23,6 +23,7 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
import org.apache.hadoop.hive.serde2.lazy.LazyLong;
import org.apache.hadoop.io.BooleanWritable;
@@ -122,4 +123,21 @@
}
}
+ public Text evaluate(Text i) {
+ if (i == null) {
+ return null;
+ }
+ // identity conversion; the Text value can be returned as-is
+ return i;
+ }
+
+ public Text evaluate(TimestampWritable i) {
+ if (i == null) {
+ return null;
+ } else {
+ t.set(i.toString());
+ return t;
+ }
+ }
+
}
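
The CAST(... AS STRING) path ends in TimestampWritable.toString(); assuming that delegates to java.sql.Timestamp, the rendering is yyyy-mm-dd hh:mm:ss[.fffffffff] with trailing fraction zeros trimmed:

    import java.sql.Timestamp;

    public class TimestampToStringDemo {
      public static void main(String[] args) {
        System.out.println(Timestamp.valueOf("2002-03-21 01:23:45"));
        // 2002-03-21 01:23:45.0
        System.out.println(Timestamp.valueOf("2002-03-21 01:23:45.120"));
        // 2002-03-21 01:23:45.12
      }
    }
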