diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveTimestamp.java b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveTimestamp.java
new file mode 100644
index 0000000..83af06a
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveTimestamp.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.util.TimeZone;
+import java.util.concurrent.ThreadLocalRandom;
+
+public class TestHiveTimestamp {
+
+ private static TimeZone defaultTZ;
+ private static final String[] IDs = TimeZone.getAvailableIDs();
+
+ @BeforeClass
+ public static void storeDefaultTZ() {
+ defaultTZ = TimeZone.getDefault();
+ }
+
+ @Before
+ public void setTZ() {
+ int index = ThreadLocalRandom.current().nextInt(IDs.length);
+ TimeZone.setDefault(TimeZone.getTimeZone(IDs[index]));
+ }
+
+ @AfterClass
+ public static void restoreTZ() {
+ TimeZone.setDefault(defaultTZ);
+ }
+
+ @Test
+ public void testParse() {
+ String s1 = "2016-01-03 12:26:34.0123";
+ String s2 = s1 + " UTC";
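+ // a 'UTC' suffix on input is expected to print back as 'GMT'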
+ Assert.assertEquals(s1 + " GMT", HiveTimestamp.valueOf(s2).toString());
+ Assert.assertEquals(s1 + " GMT+08:00", HiveTimestamp.valueOf(s1, "Asia/Shanghai").toString());
+ }
+
+ @Test
+ public void testHandleDST() {
+ // The same time zone can have different offsets due to DST
+ String s1 = "2005-01-03 02:01:00";
+ Assert.assertEquals(s1 + ".0 GMT", HiveTimestamp.valueOf(s1, "Europe/London").toString());
+ String s2 = "2005-06-03 02:01:00.30547";
+ Assert.assertEquals(s2 + " GMT+01:00", HiveTimestamp.valueOf(s2, "Europe/London").toString());
+ // 2005-04-03 02:01 doesn't exist in Los Angeles (clocks jump from 02:00 to 03:00), so it prints as 03:01 with the DST offset
+ String s3 = "2005-04-03 02:01:00.04067";
+ Assert.assertEquals("2005-04-03 03:01:00.04067 GMT-07:00",
+ HiveTimestamp.valueOf(s3, "America/Los_Angeles").toString());
+ }
+
+ @Test
+ public void testBadZoneID() {
+ try {
+ new HiveTimestamp(0, "Foo id");
+ Assert.fail("Invalid timezone ID should cause exception");
+ } catch (IllegalArgumentException e) {
+ // expected
+ }
+ }
+}
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
index 93f093f..c5f0b5c 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
@@ -46,6 +46,7 @@
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.HiveTimestamp;
import org.apache.hadoop.hive.serde2.thrift.Type;
import org.apache.hive.service.cli.TableSchema;
@@ -441,6 +442,8 @@ private Object evaluate(Type type, Object value) {
return value;
case TIMESTAMP_TYPE:
return Timestamp.valueOf((String) value);
+ case TIMESTAMPTZ_TYPE:
+ return HiveTimestamp.valueOf((String) value);
case DECIMAL_TYPE:
return new BigDecimal((String)value);
case DATE_TYPE:
diff --git a/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java b/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
index 38918f0..ebff3c5 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
@@ -26,6 +26,7 @@
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.HiveTimestamp;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.thrift.Type;
@@ -105,6 +106,8 @@ static String columnClassName(Type hiveType, JdbcColumnAttributes columnAttribut
return HiveIntervalYearMonth.class.getName();
case INTERVAL_DAY_TIME_TYPE:
return HiveIntervalDayTime.class.getName();
+ case TIMESTAMPTZ_TYPE:
+ return HiveTimestamp.class.getName();
default:
return String.class.getName();
}
@@ -142,6 +145,8 @@ static Type typeStringToHiveType(String type) throws SQLException {
return Type.DATE_TYPE;
} else if ("timestamp".equalsIgnoreCase(type)) {
return Type.TIMESTAMP_TYPE;
+ } else if ("timestamptz".equalsIgnoreCase(type)) {
+ return Type.TIMESTAMPTZ_TYPE;
} else if ("interval_year_month".equalsIgnoreCase(type)) {
return Type.INTERVAL_YEAR_MONTH_TYPE;
} else if ("interval_day_time".equalsIgnoreCase(type)) {
@@ -195,6 +200,8 @@ static String getColumnTypeName(String type) throws SQLException {
return serdeConstants.BIGINT_TYPE_NAME;
} else if ("timestamp".equalsIgnoreCase(type)) {
return serdeConstants.TIMESTAMP_TYPE_NAME;
+ } else if ("timestamptz".equalsIgnoreCase(type)) {
+ return serdeConstants.TIMESTAMPTZ_TYPE_NAME;
} else if ("date".equalsIgnoreCase(type)) {
return serdeConstants.DATE_TYPE_NAME;
} else if ("interval_year_month".equalsIgnoreCase(type)) {
@@ -305,6 +312,8 @@ static int columnPrecision(Type hiveType, JdbcColumnAttributes columnAttributes)
case INTERVAL_DAY_TIME_TYPE:
// -ddddddddd hh:mm:ss.nnnnnnnnn
return 29;
+ case TIMESTAMPTZ_TYPE:
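+ // yyyy-mm-dd hh:mm:ss.nnnnnnnnn GMT+hh:mm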
+ return 39;
default:
return Integer.MAX_VALUE;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 69a18cd..1717221 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -367,6 +367,7 @@
system.registerGenericUDF(serdeConstants.DATE_TYPE_NAME, GenericUDFToDate.class);
system.registerGenericUDF(serdeConstants.TIMESTAMP_TYPE_NAME, GenericUDFTimestamp.class);
+ system.registerGenericUDF(serdeConstants.TIMESTAMPTZ_TYPE_NAME, GenericUDFToTimestampTZ.class);
system.registerGenericUDF(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME, GenericUDFToIntervalYearMonth.class);
system.registerGenericUDF(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME, GenericUDFToIntervalDayTime.class);
system.registerGenericUDF(serdeConstants.BINARY_TYPE_NAME, GenericUDFToBinary.class);
@@ -1460,7 +1461,8 @@ public static boolean isOpCast(GenericUDF genericUDF) {
udfClass == UDFToShort.class || udfClass == UDFToString.class ||
udfClass == GenericUDFToVarchar.class || udfClass == GenericUDFToChar.class ||
udfClass == GenericUDFTimestamp.class || udfClass == GenericUDFToBinary.class ||
- udfClass == GenericUDFToDate.class || udfClass == GenericUDFToDecimal.class;
+ udfClass == GenericUDFToDate.class || udfClass == GenericUDFToDecimal.class ||
+ udfClass == GenericUDFToTimestampTZ.class;
}
/**
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
index f28d33e..e56fab1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
@@ -452,6 +452,8 @@ private int getSize(int pos, PrimitiveCategory category) {
return javaObjectOverHead;
case TIMESTAMP:
return javaObjectOverHead + javaSizePrimitiveType;
+ case TIMESTAMPTZ:
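+ // assumes one extra primitive-sized field (the zone offset) on top of TIMESTAMP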
+ return javaObjectOverHead + 2 * javaSizePrimitiveType;
default:
return javaSizeUnknownType;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
index 7be628e..16e56a9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
@@ -37,6 +37,7 @@
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.HiveTimestamp;
import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.exec.tez.TezJobMonitor;
import org.apache.hadoop.hive.ql.exec.vector.VectorFileSinkOperator;
@@ -222,6 +223,7 @@ public Kryo create() {
KryoWithHooks kryo = new KryoWithHooks();
kryo.register(java.sql.Date.class, new SqlDateSerializer());
kryo.register(java.sql.Timestamp.class, new TimestampSerializer());
+ kryo.register(HiveTimestamp.class, new HiveTimestampSerializer());
kryo.register(Path.class, new PathSerializer());
kryo.register(Arrays.asList("").getClass(), new ArraysAsListSerializer());
@@ -306,6 +308,30 @@ public void write(Kryo kryo, Output output, Timestamp ts) {
}
/**
+ * Kryo serializer for timestamptz.
+ */
+ private static class HiveTimestampSerializer extends
+ com.esotericsoftware.kryo.Serializer<HiveTimestamp> {
+
+ @Override
+ public void write(Kryo kryo, Output output, HiveTimestamp hiveTimestamp) {
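+ // fixed layout: time value (long), nanos (int), zone offset in minutes (int)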
+ output.writeLong(hiveTimestamp.getTime());
+ output.writeInt(hiveTimestamp.getNanos());
+ output.writeInt(hiveTimestamp.getOffsetInMin());
+ }
+
+ @Override
+ public HiveTimestamp read(Kryo kryo, Input input, Class<HiveTimestamp> aClass) {
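+ // read fields back in the order written by write()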
+ long time = input.readLong();
+ int nanos = input.readInt();
+ int offset = input.readInt();
+ HiveTimestamp hts = new HiveTimestamp(time, offset);
+ hts.setNanos(nanos);
+ return hts;
+ }
+ }
+
+ /**
* Custom Kryo serializer for sql date, otherwise Kryo gets confused between
* java.sql.Date and java.util.Date while deserializing
*/
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
index ba41518..6a1fdd1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
@@ -176,6 +176,9 @@ public static RelDataType convert(PrimitiveTypeInfo type, RelDataTypeFactory dtF
case TIMESTAMP:
convertedType = dtFactory.createSqlType(SqlTypeName.TIMESTAMP);
break;
+ case TIMESTAMPTZ:
+ convertedType = dtFactory.createSqlType(SqlTypeName.OTHER);
+ break;
case INTERVAL_YEAR_MONTH:
convertedType = dtFactory.createSqlIntervalType(
new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1,1)));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 9329e00..cf25af9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -200,6 +200,7 @@
TokenToTypeName.put(HiveParser.TOK_DATE, serdeConstants.DATE_TYPE_NAME);
TokenToTypeName.put(HiveParser.TOK_DATETIME, serdeConstants.DATETIME_TYPE_NAME);
TokenToTypeName.put(HiveParser.TOK_TIMESTAMP, serdeConstants.TIMESTAMP_TYPE_NAME);
+ TokenToTypeName.put(HiveParser.TOK_TIMESTAMPTZ, serdeConstants.TIMESTAMPTZ_TYPE_NAME);
TokenToTypeName.put(HiveParser.TOK_INTERVAL_YEAR_MONTH, serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
TokenToTypeName.put(HiveParser.TOK_INTERVAL_DAY_TIME, serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
TokenToTypeName.put(HiveParser.TOK_DECIMAL, serdeConstants.DECIMAL_TYPE_NAME);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index 665c3bb..85a8e2d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -127,6 +127,7 @@ KW_DOUBLE: 'DOUBLE';
KW_DATE: 'DATE';
KW_DATETIME: 'DATETIME';
KW_TIMESTAMP: 'TIMESTAMP';
+KW_TIMESTAMPTZ: 'TIMESTAMPTZ';
KW_INTERVAL: 'INTERVAL';
KW_DECIMAL: 'DECIMAL';
KW_STRING: 'STRING';
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index ecdefa9..daaa86e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -120,6 +120,7 @@ TOK_DATELITERAL;
TOK_DATETIME;
TOK_TIMESTAMP;
TOK_TIMESTAMPLITERAL;
+TOK_TIMESTAMPTZ;
TOK_INTERVAL_YEAR_MONTH;
TOK_INTERVAL_YEAR_MONTH_LITERAL;
TOK_INTERVAL_DAY_TIME;
@@ -2236,6 +2237,7 @@ primitiveType
| KW_DATE -> TOK_DATE
| KW_DATETIME -> TOK_DATETIME
| KW_TIMESTAMP -> TOK_TIMESTAMP
+ | KW_TIMESTAMPTZ -> TOK_TIMESTAMPTZ
// Uncomment to allow intervals as table column types
//| KW_INTERVAL KW_YEAR KW_TO KW_MONTH -> TOK_INTERVAL_YEAR_MONTH
//| KW_INTERVAL KW_DAY KW_TO KW_SECOND -> TOK_INTERVAL_DAY_TIME
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index 82080eb..e92b2e7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -732,6 +732,8 @@ public ColumnExprProcessor getColumnExprProcessor() {
serdeConstants.DATE_TYPE_NAME);
conversionFunctionTextHashMap.put(HiveParser.TOK_TIMESTAMP,
serdeConstants.TIMESTAMP_TYPE_NAME);
+ conversionFunctionTextHashMap.put(HiveParser.TOK_TIMESTAMPTZ,
+ serdeConstants.TIMESTAMPTZ_TYPE_NAME);
conversionFunctionTextHashMap.put(HiveParser.TOK_INTERVAL_YEAR_MONTH,
serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
conversionFunctionTextHashMap.put(HiveParser.TOK_INTERVAL_DAY_TIME,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
index a718264..7be1345 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
@@ -94,6 +94,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDoubleObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableFloatObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveTimestampObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableLongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector;
@@ -737,6 +738,8 @@ public static ColStatistics getColStatistics(ColumnStatisticsObj cso, String tab
cs.setNumNulls(csd.getBinaryStats().getNumNulls());
} else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
cs.setAvgColLen(JavaDataModel.get().lengthOfTimestamp());
+ } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMPTZ_TYPE_NAME)) {
+ cs.setAvgColLen(JavaDataModel.get().lengthOfHiveTimestamp());
} else if (colTypeLowerCase.startsWith(serdeConstants.DECIMAL_TYPE_NAME)) {
cs.setAvgColLen(JavaDataModel.get().lengthOfDecimal());
cs.setCountDistint(csd.getDecimalStats().getNumDVs());
@@ -1032,6 +1035,8 @@ public static long getAvgColLenOfFixedLengthTypes(String colType) {
return JavaDataModel.get().primitive2();
} else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
return JavaDataModel.get().lengthOfTimestamp();
+ } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMPTZ_TYPE_NAME)) {
+ return JavaDataModel.get().lengthOfHiveTimestamp();
} else if (colTypeLowerCase.equals(serdeConstants.DATE_TYPE_NAME)) {
return JavaDataModel.get().lengthOfDate();
} else if (colTypeLowerCase.startsWith(serdeConstants.DECIMAL_TYPE_NAME)) {
@@ -1069,6 +1074,8 @@ public static long getSizeOfPrimitiveTypeArraysFromType(String colType, int leng
return JavaDataModel.get().lengthForBooleanArrayOfSize(length);
} else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
return JavaDataModel.get().lengthForTimestampArrayOfSize(length);
+ } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMPTZ_TYPE_NAME)) {
+ return JavaDataModel.get().lengthForHiveTimestampArrayOfSize(length);
} else if (colTypeLowerCase.equals(serdeConstants.DATE_TYPE_NAME)) {
return JavaDataModel.get().lengthForDateArrayOfSize(length);
} else if (colTypeLowerCase.startsWith(serdeConstants.DECIMAL_TYPE_NAME)) {
@@ -1154,6 +1161,8 @@ public static long getWritableSize(ObjectInspector oi, Object value) {
return JavaDataModel.get().primitive1();
} else if (oi instanceof WritableTimestampObjectInspector) {
return JavaDataModel.get().lengthOfTimestamp();
+ } else if (oi instanceof WritableHiveTimestampObjectInspector) {
+ return JavaDataModel.get().lengthOfHiveTimestamp();
}
return 0;
@@ -1500,6 +1509,8 @@ public static long getDataSizeFromColumnStats(long numRows, List
sizeOf = JavaDataModel.get().lengthForByteArrayOfSize(acl);
} else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
sizeOf = JavaDataModel.get().lengthOfTimestamp();
+ } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMPTZ_TYPE_NAME)) {
+ sizeOf = JavaDataModel.get().lengthOfHiveTimestamp();
} else if (colTypeLowerCase.startsWith(serdeConstants.DECIMAL_TYPE_NAME)) {
sizeOf = JavaDataModel.get().lengthOfDecimal();
} else if (colTypeLowerCase.equals(serdeConstants.DATE_TYPE_NAME)) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
index 17b892c..d0bc7af 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
@@ -31,8 +31,10 @@
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableBase;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
@@ -182,6 +184,14 @@ public BooleanWritable evaluate(DateWritable d) {
}
public BooleanWritable evaluate(TimestampWritable i) {
+ return evalTS(i);
+ }
+
+ public BooleanWritable evaluate(HiveTimestampWritable i) {
+ return evalTS(i);
+ }
+
+ private BooleanWritable evalTS(TimestampWritableBase i) {
if (i == null) {
return null;
} else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
index efae82d..85a436b 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
@@ -26,8 +26,10 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableBase;
import org.apache.hadoop.hive.serde2.lazy.LazyByte;
import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
import org.apache.hadoop.io.BooleanWritable;
@@ -183,10 +185,18 @@ public ByteWritable evaluate(Text i) {
}
public ByteWritable evaluate(TimestampWritable i) {
+ return evalTS(i);
+ }
+
+ public ByteWritable evaluate(HiveTimestampWritable i) {
+ return evalTS(i);
+ }
+
+ private ByteWritable evalTS(TimestampWritableBase i) {
if (i == null) {
return null;
} else {
- byteWritable.set((byte)i.getSeconds());
+ byteWritable.set((byte) i.getSeconds());
return byteWritable;
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
index 9cbc114..3f23b29 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
@@ -26,8 +26,10 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableBase;
import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.FloatWritable;
@@ -180,6 +182,14 @@ public DoubleWritable evaluate(Text i) {
}
public DoubleWritable evaluate(TimestampWritable i) {
+ return evalTS(i);
+ }
+
+ public DoubleWritable evaluate(HiveTimestampWritable i) {
+ return evalTS(i);
+ }
+
+ private DoubleWritable evalTS(TimestampWritableBase i) {
if (i == null) {
return null;
} else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
index 5808c90..c11c447 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
@@ -26,8 +26,10 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableBase;
import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.FloatWritable;
@@ -181,6 +183,14 @@ public FloatWritable evaluate(Text i) {
}
public FloatWritable evaluate(TimestampWritable i) {
+ return evalTS(i);
+ }
+
+ public FloatWritable evaluate(HiveTimestampWritable i) {
+ return evalTS(i);
+ }
+
+ private FloatWritable evalTS(TimestampWritableBase i) {
if (i == null) {
return null;
} else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
index a7551cb..dc02557 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
@@ -27,8 +27,10 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableBase;
import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
import org.apache.hadoop.io.BooleanWritable;
@@ -184,6 +186,14 @@ public IntWritable evaluate(Text i) {
}
}
+ public IntWritable evaluate(TimestampWritable i) {
+ return evalTS(i);
+ }
+
+ public IntWritable evaluate(HiveTimestampWritable i) {
+ return evalTS(i);
+ }
+
/**
* Convert from Timestamp to an integer. This is called for CAST(... AS INT)
*
@@ -191,7 +201,7 @@ public IntWritable evaluate(Text i) {
* The Timestamp value to convert
* @return IntWritable
*/
- public IntWritable evaluate(TimestampWritable i) {
+ private IntWritable evalTS(TimestampWritableBase i) {
if (i == null) {
return null;
} else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
index c961d14..06db8d3 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
@@ -26,8 +26,10 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableBase;
import org.apache.hadoop.hive.serde2.lazy.LazyLong;
import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
import org.apache.hadoop.io.BooleanWritable;
@@ -195,6 +197,14 @@ public LongWritable evaluate(Text i) {
}
public LongWritable evaluate(TimestampWritable i) {
+ return evalTS(i);
+ }
+
+ public LongWritable evaluate(HiveTimestampWritable i) {
+ return evalTS(i);
+ }
+
+ private LongWritable evalTS(TimestampWritableBase i) {
if (i == null) {
return null;
} else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
index 570408a..0639fc0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
@@ -27,8 +27,10 @@
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableBase;
import org.apache.hadoop.hive.serde2.lazy.LazyShort;
import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
import org.apache.hadoop.io.BooleanWritable;
@@ -185,6 +187,14 @@ public ShortWritable evaluate(Text i) {
}
public ShortWritable evaluate(TimestampWritable i) {
+ return evalTS(i);
+ }
+
+ public ShortWritable evaluate(HiveTimestampWritable i) {
+ return evalTS(i);
+ }
+
+ private ShortWritable evalTS(TimestampWritableBase i) {
if (i == null) {
return null;
} else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
index 5cacd59..7d50da5 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
@@ -24,8 +24,10 @@
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableBase;
import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
import org.apache.hadoop.hive.serde2.lazy.LazyLong;
import org.apache.hadoop.io.BooleanWritable;
@@ -144,6 +146,14 @@ public Text evaluate(DateWritable d) {
}
public Text evaluate(TimestampWritable i) {
+ return evalTS(i);
+ }
+
+ public Text evaluate(HiveTimestampWritable i) {
+ return evalTS(i);
+ }
+
+ private Text evalTS(TimestampWritableBase i) {
if (i == null) {
return null;
} else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
index 259fde8..4867955 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
@@ -403,6 +403,7 @@ protected void obtainDateConverter(ObjectInspector[] arguments, int i,
case TIMESTAMP:
case DATE:
case VOID:
+ case TIMESTAMPTZ:
outOi = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
break;
default:
@@ -499,6 +500,7 @@ protected Date getDateValue(DeferredObject[] arguments, int i, PrimitiveCategory
break;
case TIMESTAMP:
case DATE:
+ case TIMESTAMPTZ:
Object writableValue = converters[i].convert(obj);
date = ((DateWritable) writableValue).get();
break;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToTimestampTZ.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToTimestampTZ.java
new file mode 100644
index 0000000..85f9d99
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToTimestampTZ.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampTZConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+ * Convert from string to TIMESTAMPTZ.
+ */
+@Description(name = "timestamptz",
+ value = "CAST(STRING as TIMESTAMPTZ) - returns the timestamptz represented by string.",
+ extended = "The string should be of format 'yyyy-MM-dd HH:mm:ss.[fff...] TimezoneID'. " +
+ "TimezoneID needs to be understood by java.util.TimeZone.")
+public class GenericUDFToTimestampTZ extends GenericUDF {
+
+ private transient PrimitiveObjectInspector argumentOI;
+ private transient TimestampTZConverter converter;
+
+ @Override
+ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+ if (arguments.length < 1) {
+ throw new UDFArgumentLengthException(
+ "The function CAST as TIMESTAMPTZ requires at least one argument, got "
+ + arguments.length);
+ }
+ try {
+ argumentOI = (PrimitiveObjectInspector) arguments[0];
+ switch (argumentOI.getPrimitiveCategory()) {
+ case CHAR:
+ case VARCHAR:
+ case STRING:
+ case TIMESTAMPTZ:
+ break;
+ default:
+ throw new UDFArgumentException(
+ "CAST as TIMESTAMPTZ only allows string or timestamptz types");
+ }
+ } catch (ClassCastException e) {
+ throw new UDFArgumentException(
+ "The function CAST as TIMESTAMPTZ takes only primitive types");
+ }
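+ // the converter turns any accepted input type into a writable HiveTimestamp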
+ converter = new TimestampTZConverter(argumentOI,
+ PrimitiveObjectInspectorFactory.writableHiveTimestampObjectInspector);
+ return PrimitiveObjectInspectorFactory.writableHiveTimestampObjectInspector;
+ }
+
+ @Override
+ public Object evaluate(DeferredObject[] arguments) throws HiveException {
+ Object o0 = arguments[0].get();
+ if (o0 == null) {
+ return null;
+ }
+ return converter.convert(o0);
+ }
+
+ @Override
+ public String getDisplayString(String[] children) {
+ assert (children.length == 1);
+ return "CAST(" + children[0] + " AS TIMESTAMPTZ)";
+ }
+}
diff --git a/ql/src/test/queries/clientpositive/timestamptz.q b/ql/src/test/queries/clientpositive/timestamptz.q
new file mode 100644
index 0000000..daff4eb
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/timestamptz.q
@@ -0,0 +1,5 @@
+explain select cast('2005-01-03 02:01:00 GMT' as timestamptz);
+select cast('2005-01-03 02:01:00 GMT' as timestamptz);
+
+explain select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz);
+select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz);
\ No newline at end of file
diff --git a/ql/src/test/queries/clientpositive/timestamptz_1.q b/ql/src/test/queries/clientpositive/timestamptz_1.q
new file mode 100644
index 0000000..78b7526
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/timestamptz_1.q
@@ -0,0 +1,37 @@
+set hive.fetch.task.conversion=more;
+
+drop table tstz1;
+
+create table tstz1(t timestamptz);
+
+insert overwrite table tstz1 select cast('2016-01-03 12:26:34 America/Los_Angeles' as timestamptz);
+select cast(t as boolean) from tstz1;
+select cast(t as tinyint) from tstz1;
+select cast(t as smallint) from tstz1;
+select cast(t as int) from tstz1;
+select cast(t as bigint) from tstz1;
+select cast(t as string) from tstz1;
+
+insert overwrite table tstz1 select '2016-01-03 12:26:34.1 America/Los_Angeles';
+select cast(t as boolean) from tstz1;
+select cast(t as tinyint) from tstz1;
+select cast(t as smallint) from tstz1;
+select cast(t as int) from tstz1;
+select cast(t as bigint) from tstz1;
+select cast(t as string) from tstz1;
+
+insert overwrite table tstz1 select '2016-01-03 12:26:34.0123 America/Los_Angeles';
+select cast(t as boolean) from tstz1;
+select cast(t as tinyint) from tstz1;
+select cast(t as smallint) from tstz1;
+select cast(t as int) from tstz1;
+select cast(t as bigint) from tstz1;
+select cast(t as string) from tstz1;
+
+insert overwrite table tstz1 select '2016-01-03 12:26:34.012300 America/Los_Angeles';
+select cast(t as boolean) from tstz1;
+select cast(t as tinyint) from tstz1;
+select cast(t as smallint) from tstz1;
+select cast(t as int) from tstz1;
+select cast(t as bigint) from tstz1;
+select cast(t as string) from tstz1;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientpositive/timestamptz_2.q b/ql/src/test/queries/clientpositive/timestamptz_2.q
new file mode 100644
index 0000000..2840865
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/timestamptz_2.q
@@ -0,0 +1,19 @@
+set hive.fetch.task.conversion=more;
+
+drop table tstz2;
+
+create table tstz2(t timestamptz);
+
+insert into table tstz2 values
+ ('2005-04-03 03:01:00.04067 GMT-07:00'),('2005-01-03 02:01:00 GMT'),
+ ('2013-06-03 02:01:00.30547 GMT+01:00'),('2016-01-03 12:26:34.0123 GMT+08:00');
+
+select * from tstz2 where t='2005-01-03 02:01:00 GMT';
+
+select * from tstz2 where t>'2013-06-03 02:01:00.30547 GMT+01:00';
+
+select min(t),max(t) from tstz2;
+
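+-- ordering below is by the underlying instant, not the string form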
+select t from tstz2 group by t order by t;
+
+select * from tstz2 a join tstz2 b on a.t=b.t order by a.t;
\ No newline at end of file
diff --git a/ql/src/test/results/clientpositive/timestamptz.q.out b/ql/src/test/results/clientpositive/timestamptz.q.out
new file mode 100644
index 0000000..5ab2e98
--- /dev/null
+++ b/ql/src/test/results/clientpositive/timestamptz.q.out
@@ -0,0 +1,62 @@
+PREHOOK: query: explain select cast('2005-01-03 02:01:00 GMT' as timestamptz)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select cast('2005-01-03 02:01:00 GMT' as timestamptz)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: 2005-01-03 02:01:00.0 GMT (type: timestamptz)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+ ListSink
+
+PREHOOK: query: select cast('2005-01-03 02:01:00 GMT' as timestamptz)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2005-01-03 02:01:00 GMT' as timestamptz)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2005-01-03 02:01:00 GMT
+PREHOOK: query: explain select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: 2016-01-03 12:26:34.0123 GMT-08:00 (type: timestamptz)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+ ListSink
+
+PREHOOK: query: select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2016-01-03 12:26:34.0123 GMT-08:00
diff --git a/ql/src/test/results/clientpositive/timestamptz_1.q.out b/ql/src/test/results/clientpositive/timestamptz_1.q.out
new file mode 100644
index 0000000..cfdfc6b
--- /dev/null
+++ b/ql/src/test/results/clientpositive/timestamptz_1.q.out
@@ -0,0 +1,264 @@
+PREHOOK: query: drop table tstz1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstz1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstz1(t timestamptz)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: create table tstz1(t timestamptz)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tstz1
+PREHOOK: query: insert overwrite table tstz1 select cast('2016-01-03 12:26:34 America/Los_Angeles' as timestamptz)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: insert overwrite table tstz1 select cast('2016-01-03 12:26:34 America/Los_Angeles' as timestamptz)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tstz1
+POSTHOOK: Lineage: tstz1.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as boolean) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+true
+PREHOOK: query: select cast(t as tinyint) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as tinyint) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+-6
+PREHOOK: query: select cast(t as smallint) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as smallint) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+-31750
+PREHOOK: query: select cast(t as int) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as int) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+1451852794
+PREHOOK: query: select cast(t as bigint) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as bigint) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+1451852794
+PREHOOK: query: select cast(t as string) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as string) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 12:26:34 GMT-08:00
+PREHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.1 America/Los_Angeles'
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.1 America/Los_Angeles'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tstz1
+POSTHOOK: Lineage: tstz1.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as boolean) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+true
+PREHOOK: query: select cast(t as tinyint) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as tinyint) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+-6
+PREHOOK: query: select cast(t as smallint) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as smallint) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+-31750
+PREHOOK: query: select cast(t as int) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as int) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+1451852794
+PREHOOK: query: select cast(t as bigint) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as bigint) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+1451852794
+PREHOOK: query: select cast(t as string) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as string) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 12:26:34.1 GMT-08:00
+PREHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.0123 America/Los_Angeles'
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.0123 America/Los_Angeles'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tstz1
+POSTHOOK: Lineage: tstz1.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as boolean) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+true
+PREHOOK: query: select cast(t as tinyint) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as tinyint) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+-6
+PREHOOK: query: select cast(t as smallint) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as smallint) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+-31750
+PREHOOK: query: select cast(t as int) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as int) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+1451852794
+PREHOOK: query: select cast(t as bigint) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as bigint) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+1451852794
+PREHOOK: query: select cast(t as string) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as string) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 12:26:34.0123 GMT-08:00
+PREHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.012300 America/Los_Angeles'
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.012300 America/Los_Angeles'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tstz1
+POSTHOOK: Lineage: tstz1.t EXPRESSION []
+PREHOOK: query: select cast(t as boolean) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as boolean) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+true
+PREHOOK: query: select cast(t as tinyint) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as tinyint) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+-6
+PREHOOK: query: select cast(t as smallint) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as smallint) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+-31750
+PREHOOK: query: select cast(t as int) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as int) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+1451852794
+PREHOOK: query: select cast(t as bigint) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as bigint) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+1451852794
+PREHOOK: query: select cast(t as string) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as string) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 12:26:34.0123 GMT-08:00
diff --git a/ql/src/test/results/clientpositive/timestamptz_2.q.out b/ql/src/test/results/clientpositive/timestamptz_2.q.out
new file mode 100644
index 0000000..18eb4fb
--- /dev/null
+++ b/ql/src/test/results/clientpositive/timestamptz_2.q.out
@@ -0,0 +1,76 @@
+PREHOOK: query: drop table tstz2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstz2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstz2(t timestamptz)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tstz2
+POSTHOOK: query: create table tstz2(t timestamptz)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tstz2
+PREHOOK: query: insert into table tstz2 values
+ ('2005-04-03 03:01:00.04067 GMT-07:00'),('2005-01-03 02:01:00 GMT'),
+ ('2013-06-03 02:01:00.30547 GMT+01:00'),('2016-01-03 12:26:34.0123 GMT+08:00')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@tstz2
+POSTHOOK: query: insert into table tstz2 values
+ ('2005-04-03 03:01:00.04067 GMT-07:00'),('2005-01-03 02:01:00 GMT'),
+ ('2013-06-03 02:01:00.30547 GMT+01:00'),('2016-01-03 12:26:34.0123 GMT+08:00')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@tstz2
+POSTHOOK: Lineage: tstz2.t EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: select * from tstz2 where t='2005-01-03 02:01:00 GMT'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tstz2 where t='2005-01-03 02:01:00 GMT'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2005-01-03 02:01:00 GMT
+PREHOOK: query: select * from tstz2 where t>'2013-06-03 02:01:00.30547 GMT+01:00'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tstz2 where t>'2013-06-03 02:01:00.30547 GMT+01:00'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2016-01-03 12:26:34.0123 GMT+08:00
+PREHOOK: query: select min(t),max(t) from tstz2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select min(t),max(t) from tstz2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2005-01-03 02:01:00 GMT 2016-01-03 12:26:34.0123 GMT+08:00
+PREHOOK: query: select t from tstz2 group by t order by t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select t from tstz2 group by t order by t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2005-01-03 02:01:00 GMT
+2005-04-03 03:01:00.04067 GMT-07:00
+2013-06-03 02:01:00.30547 GMT+01:00
+2016-01-03 12:26:34.0123 GMT+08:00
+PREHOOK: query: select * from tstz2 a join tstz2 b on a.t=b.t order by a.t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tstz2 a join tstz2 b on a.t=b.t order by a.t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2005-01-03 02:01:00 GMT 2005-01-03 02:01:00 GMT
+2005-04-03 03:01:00.04067 GMT-07:00 2005-04-03 03:01:00.04067 GMT-07:00
+2013-06-03 02:01:00.30547 GMT+01:00 2013-06-03 02:01:00.30547 GMT+01:00
+2016-01-03 12:26:34.0123 GMT+08:00 2016-01-03 12:26:34.0123 GMT+08:00
diff --git a/serde/if/serde.thrift b/serde/if/serde.thrift
index 6caad36..dd2d833 100644
--- a/serde/if/serde.thrift
+++ b/serde/if/serde.thrift
@@ -60,6 +60,7 @@ const string VARCHAR_TYPE_NAME = "varchar";
const string DATE_TYPE_NAME = "date";
const string DATETIME_TYPE_NAME = "datetime";
const string TIMESTAMP_TYPE_NAME = "timestamp";
+const string TIMESTAMPTZ_TYPE_NAME = "timestamptz";
const string DECIMAL_TYPE_NAME = "decimal";
const string BINARY_TYPE_NAME = "binary";
const string INTERVAL_YEAR_MONTH_TYPE_NAME = "interval_year_month";
@@ -90,6 +91,7 @@ const set PrimitiveTypes = [
DATE_TYPE_NAME
DATETIME_TYPE_NAME
TIMESTAMP_TYPE_NAME
+ TIMESTAMPTZ_TYPE_NAME
INTERVAL_YEAR_MONTH_TYPE_NAME
INTERVAL_DAY_TIME_TYPE_NAME
DECIMAL_TYPE_NAME
diff --git a/serde/src/gen/thrift/gen-cpp/serde_constants.cpp b/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
index 3a675bf..007d8e6 100644
--- a/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
+++ b/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
@@ -77,6 +77,8 @@ serdeConstants::serdeConstants() {
TIMESTAMP_TYPE_NAME = "timestamp";
+ TIMESTAMPTZ_TYPE_NAME = "timestamptz";
+
DECIMAL_TYPE_NAME = "decimal";
BINARY_TYPE_NAME = "binary";
@@ -113,6 +115,7 @@ serdeConstants::serdeConstants() {
PrimitiveTypes.insert("date");
PrimitiveTypes.insert("datetime");
PrimitiveTypes.insert("timestamp");
+ PrimitiveTypes.insert("timestamptz");
PrimitiveTypes.insert("interval_year_month");
PrimitiveTypes.insert("interval_day_time");
PrimitiveTypes.insert("decimal");
diff --git a/serde/src/gen/thrift/gen-cpp/serde_constants.h b/serde/src/gen/thrift/gen-cpp/serde_constants.h
index a5f33fb..ddf6e57 100644
--- a/serde/src/gen/thrift/gen-cpp/serde_constants.h
+++ b/serde/src/gen/thrift/gen-cpp/serde_constants.h
@@ -48,6 +48,7 @@ class serdeConstants {
std::string DATE_TYPE_NAME;
std::string DATETIME_TYPE_NAME;
std::string TIMESTAMP_TYPE_NAME;
+ std::string TIMESTAMPTZ_TYPE_NAME;
std::string DECIMAL_TYPE_NAME;
std::string BINARY_TYPE_NAME;
std::string INTERVAL_YEAR_MONTH_TYPE_NAME;
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
index 04ed8f5..756f40d 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
@@ -102,6 +102,8 @@
public static final String TIMESTAMP_TYPE_NAME = "timestamp";
+ public static final String TIMESTAMPTZ_TYPE_NAME = "timestamptz";
+
public static final String DECIMAL_TYPE_NAME = "decimal";
public static final String BINARY_TYPE_NAME = "binary";
@@ -140,6 +142,7 @@
PrimitiveTypes.add("date");
PrimitiveTypes.add("datetime");
PrimitiveTypes.add("timestamp");
+ PrimitiveTypes.add("timestamptz");
PrimitiveTypes.add("interval_year_month");
PrimitiveTypes.add("interval_day_time");
PrimitiveTypes.add("decimal");
diff --git a/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php b/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
index 18c3991..3164c33 100644
--- a/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
+++ b/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
@@ -51,6 +51,7 @@ final class Constant extends \Thrift\Type\TConstant {
static protected $DATE_TYPE_NAME;
static protected $DATETIME_TYPE_NAME;
static protected $TIMESTAMP_TYPE_NAME;
+ static protected $TIMESTAMPTZ_TYPE_NAME;
static protected $DECIMAL_TYPE_NAME;
static protected $BINARY_TYPE_NAME;
static protected $INTERVAL_YEAR_MONTH_TYPE_NAME;
@@ -198,6 +199,10 @@ final class Constant extends \Thrift\Type\TConstant {
return "timestamp";
}
+ static protected function init_TIMESTAMPTZ_TYPE_NAME() {
+ return "timestamptz";
+ }
+
static protected function init_DECIMAL_TYPE_NAME() {
return "decimal";
}
@@ -258,6 +263,7 @@ final class Constant extends \Thrift\Type\TConstant {
"date" => true,
"datetime" => true,
"timestamp" => true,
+ "timestamptz" => true,
"interval_year_month" => true,
"interval_day_time" => true,
"decimal" => true,
diff --git a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
index fafdc24..6f3ce18 100644
--- a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
+++ b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
@@ -42,6 +42,7 @@
DATE_TYPE_NAME = "date"
DATETIME_TYPE_NAME = "datetime"
TIMESTAMP_TYPE_NAME = "timestamp"
+TIMESTAMPTZ_TYPE_NAME = "timestamptz"
DECIMAL_TYPE_NAME = "decimal"
BINARY_TYPE_NAME = "binary"
INTERVAL_YEAR_MONTH_TYPE_NAME = "interval_year_month"
@@ -68,6 +69,7 @@
"date",
"datetime",
"timestamp",
+ "timestamptz",
"interval_year_month",
"interval_day_time",
"decimal",
diff --git a/serde/src/gen/thrift/gen-rb/serde_constants.rb b/serde/src/gen/thrift/gen-rb/serde_constants.rb
index 0ce9f27..0d3fb99 100644
--- a/serde/src/gen/thrift/gen-rb/serde_constants.rb
+++ b/serde/src/gen/thrift/gen-rb/serde_constants.rb
@@ -73,6 +73,8 @@ DATETIME_TYPE_NAME = %q"datetime"
TIMESTAMP_TYPE_NAME = %q"timestamp"
+TIMESTAMPTZ_TYPE_NAME = %q"timestamptz"
+
DECIMAL_TYPE_NAME = %q"decimal"
BINARY_TYPE_NAME = %q"binary"
@@ -110,6 +112,7 @@ PrimitiveTypes = Set.new([
%q"date",
%q"datetime",
%q"timestamp",
+ %q"timestamptz",
%q"interval_year_month",
%q"interval_day_time",
%q"decimal",
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
index 6e08dfd..dcbf36a 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
@@ -43,6 +43,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveTimestampObjectorInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
@@ -274,6 +275,12 @@ static void buildJSONString(StringBuilder sb, Object o, ObjectInspector oi, Stri
sb.append('"');
break;
}
+ case TIMESTAMPTZ: {
+ sb.append('"');
+ sb.append(((HiveTimestampObjectorInspector) poi)
+ .getPrimitiveWritableObject(o));
+ sb.append('"');
+ break;
+ }
case BINARY: {
BytesWritable bw = ((BinaryObjectInspector) oi).getPrimitiveWritableObject(o);
Text txt = new Text();
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
index 5e119d7..0f62093 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
@@ -47,9 +47,11 @@
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableBase;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -68,6 +70,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveTimestampObjectorInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
@@ -409,13 +412,12 @@ static Object deserialize(InputByteBuffer buffer, TypeInfo type,
case TIMESTAMP:
TimestampWritable t = (reuse == null ? new TimestampWritable() :
(TimestampWritable) reuse);
- byte[] bytes = new byte[TimestampWritable.BINARY_SORTABLE_LENGTH];
- for (int i = 0; i < bytes.length; i++) {
- bytes[i] = buffer.read(invert);
- }
- t.setBinarySortable(bytes, 0);
- return t;
+ return deserializeTimestampWritable(buffer, t, invert);
+ case TIMESTAMPTZ:
+ HiveTimestampWritable ht = (reuse == null ? new HiveTimestampWritable() :
+ (HiveTimestampWritable) reuse);
+ return deserializeTimestampWritable(buffer, ht, invert);
case INTERVAL_YEAR_MONTH: {
HiveIntervalYearMonthWritable i = reuse == null ? new HiveIntervalYearMonthWritable()
@@ -784,6 +786,12 @@ static void serialize(ByteStream.Output buffer, Object o, ObjectInspector oi,
serializeTimestampWritable(buffer, t, invert);
return;
}
+ case TIMESTAMPTZ: {
+ HiveTimestampObjectorInspector toi = (HiveTimestampObjectorInspector) poi;
+ HiveTimestampWritable t = toi.getPrimitiveWritableObject(o);
+ serializeTimestampWritable(buffer, t, invert);
+ return;
+ }
case INTERVAL_YEAR_MONTH: {
HiveIntervalYearMonthObjectInspector ioi = (HiveIntervalYearMonthObjectInspector) poi;
HiveIntervalYearMonth intervalYearMonth = ioi.getPrimitiveJavaObject(o);
@@ -959,13 +967,29 @@ public static void serializeDouble(ByteStream.Output buffer, double vd, boolean
writeByte(buffer, (byte) v, invert);
}
- public static void serializeTimestampWritable(ByteStream.Output buffer, TimestampWritable t, boolean invert) {
+ public static void serializeTimestampWritable(ByteStream.Output buffer,
+ TimestampWritableBase t, boolean invert) {
byte[] data = t.getBinarySortable();
for (int i = 0; i < data.length; i++) {
writeByte(buffer, data[i], invert);
}
}
+ public static TimestampWritableBase deserializeTimestampWritable(InputByteBuffer buffer,
+ TimestampWritableBase t, boolean invert, byte[] bytes) throws IOException {
+ for (int i = 0; i < bytes.length; i++) {
+ bytes[i] = buffer.read(invert);
+ }
+ t.setBinarySortable(bytes, 0);
+ return t;
+ }
+
+ public static TimestampWritableBase deserializeTimestampWritable(InputByteBuffer buffer,
+ TimestampWritableBase t, boolean invert) throws IOException {
+ byte[] bytes = new byte[t.binSortableLen()];
+ return deserializeTimestampWritable(buffer, t, invert, bytes);
+ }
+
public static void serializeHiveIntervalYearMonth(ByteStream.Output buffer,
HiveIntervalYearMonth intervalYearMonth, boolean invert) {
int totalMonths = intervalYearMonth.getTotalMonths();
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
index 003a2d4..be5357a 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
@@ -228,7 +228,7 @@ public boolean readCheckNull() throws IOException {
case TIMESTAMP:
{
if (tempTimestampBytes == null) {
- tempTimestampBytes = new byte[TimestampWritable.BINARY_SORTABLE_LENGTH];
+ tempTimestampBytes = new byte[currentTimestampWritable.binSortableLen()];
}
final boolean invert = columnSortOrderIsDesc[fieldIndex];
for (int i = 0; i < tempTimestampBytes.length; i++) {
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveTimestampWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveTimestampWritable.java
new file mode 100644
index 0000000..f9d00c7
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveTimestampWritable.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.io;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hive.common.type.HiveTimestamp;
+
+import java.sql.Timestamp;
+
+/**
+ * Writable for HiveTimestamp.
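+ *
+ * The layout matches TimestampWritableBase, with the timezone offset in
+ * minutes appended as a trailing VInt (e.g. GMT+08:00 is stored as 480).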
+ */
+public class HiveTimestampWritable extends TimestampWritableBase {
+
+ public HiveTimestampWritable() {
+ timestamp = new HiveTimestamp(0, 0);
+ }
+
+ public HiveTimestampWritable(byte[] bytes, int offset) {
+ timestamp = new HiveTimestamp(0, 0);
+ set(bytes, offset);
+ }
+
+ public HiveTimestampWritable(HiveTimestampWritable t) {
+ this(t.getBytes(), 0);
+ }
+
+ public HiveTimestampWritable(HiveTimestamp t) {
+ timestamp = new HiveTimestamp(0, 0);
+ set(t);
+ }
+
+ @Override
+ public int maxNumBytes() {
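+ // At most 13 bytes for the base Timestamp encoding, plus up to 3 bytes
+ // for the timezone-offset VInt (the offset is stored in minutes).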
+ return 16;
+ }
+
+ @Override
+ public int binSortableLen() {
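+ // 7 bytes for seconds, 4 for nanoseconds, 4 for the timezone offset.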
+ return 15;
+ }
+
+ @Override
+ public void set(Timestamp t) {
+ if (t != null) {
+ Preconditions.checkArgument(t.getClass().equals(HiveTimestamp.class));
+ }
+ super.set(t);
+ }
+
+ @Override
+ public byte[] getBinarySortable() {
+ byte[] b = super.getBinarySortable();
+ Integer tzOffset = getTimezoneOffset();
+ Preconditions.checkArgument(tzOffset != null);
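+ // Flip the sign bit so that negative offsets order before positive ones.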
+ intToBytes(tzOffset ^ DECIMAL_OR_SECOND_VINT_FLAG, b, 11);
+ return b;
+ }
+
+ @Override
+ public void setBinarySortable(byte[] bytes, int binSortOffset) {
+ super.setBinarySortable(bytes, binSortOffset);
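+ // Undo the sign-bit flip applied in getBinarySortable.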
+ int tzOffset = bytesToInt(bytes, binSortOffset + 11) ^ DECIMAL_OR_SECOND_VINT_FLAG;
+ ((HiveTimestamp) timestamp).setOffsetInMin(tzOffset);
+ }
+
+ public HiveTimestamp getTimestamp() {
+ if (timestampEmpty) {
+ populateTimestamp();
+ }
+ return (HiveTimestamp) timestamp;
+ }
+
+ public static HiveTimestamp createHiveTimestamp(byte[] bytes, int offset) {
+ HiveTimestamp t = new HiveTimestamp(0, 0);
+ TimestampWritableBase.setTimestamp(t, bytes, offset);
+ return t;
+ }
+
+}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
index bbccc7f..02d23bb 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
@@ -6,9 +6,9 @@
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,90 +17,21 @@
*/
package org.apache.hadoop.hive.serde2.io;
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.sql.Timestamp;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Date;
+import com.google.common.base.Preconditions;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.ql.util.TimestampUtils;
-import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
-import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
-import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.WritableUtils;
+import java.sql.Timestamp;
/**
- * TimestampWritable
- * Writable equivalent of java.sq.Timestamp
- *
- * Timestamps are of the format
- * YYYY-MM-DD HH:MM:SS.[fff...]
- *
- * We encode Unix timestamp in seconds in 4 bytes, using the MSB to signify
- * whether the timestamp has a fractional portion.
- *
- * The fractional portion is reversed, and encoded as a VInt
- * so timestamps with less precision use fewer bytes.
- *
- * 0.1 -> 1
- * 0.01 -> 10
- * 0.001 -> 100
- *
+ * Writable for Timestamp.
*/
-public class TimestampWritable implements WritableComparable<TimestampWritable> {
-
- static final public byte[] nullBytes = {0x0, 0x0, 0x0, 0x0};
-
- private static final int DECIMAL_OR_SECOND_VINT_FLAG = 0x80000000;
- private static final int LOWEST_31_BITS_OF_SEC_MASK = 0x7fffffff;
-
- private static final long SEVEN_BYTE_LONG_SIGN_FLIP = 0xff80L << 48;
-
-
- /** The maximum number of bytes required for a TimestampWritable */
- public static final int MAX_BYTES = 13;
-
- public static final int BINARY_SORTABLE_LENGTH = 11;
-
- private static final ThreadLocal<DateFormat> threadLocalDateFormat =
- new ThreadLocal<DateFormat>() {
- @Override
- protected DateFormat initialValue() {
- return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- }
- };
-
- private Timestamp timestamp = new Timestamp(0);
-
- /**
- * true if data is stored in timestamp field rather than byte arrays.
- * allows for lazy conversion to bytes when necessary
- * false otherwise
- */
- private boolean bytesEmpty;
- private boolean timestampEmpty;
-
- /* Allow use of external byte[] for efficiency */
- private byte[] currentBytes;
- private final byte[] internalBytes = new byte[MAX_BYTES];
- private byte[] externalBytes;
- private int offset;
+public class TimestampWritable extends TimestampWritableBase {
- /* Constructors */
public TimestampWritable() {
- bytesEmpty = false;
- currentBytes = internalBytes;
- offset = 0;
-
- clearTimestamp();
+ timestamp = new Timestamp(0);
}
public TimestampWritable(byte[] bytes, int offset) {
+ timestamp = new Timestamp(0);
set(bytes, offset);
}
@@ -109,531 +40,48 @@ public TimestampWritable(TimestampWritable t) {
}
public TimestampWritable(Timestamp t) {
+ timestamp = new Timestamp(0);
set(t);
}
- public void set(byte[] bytes, int offset) {
- externalBytes = bytes;
- this.offset = offset;
- bytesEmpty = false;
- currentBytes = externalBytes;
-
- clearTimestamp();
- }
-
- public void setTime(long time) {
- timestamp.setTime(time);
- bytesEmpty = true;
- timestampEmpty = false;
- }
-
+ @Override
public void set(Timestamp t) {
- if (t == null) {
- timestamp.setTime(0);
- timestamp.setNanos(0);
- return;
- }
- this.timestamp = t;
- bytesEmpty = true;
- timestampEmpty = false;
- }
-
- public void set(TimestampWritable t) {
- if (t.bytesEmpty) {
- set(t.getTimestamp());
- return;
- }
- if (t.currentBytes == t.externalBytes) {
- set(t.currentBytes, t.offset);
- } else {
- set(t.currentBytes, 0);
- }
- }
-
- public static void updateTimestamp(Timestamp timestamp, long secondsAsMillis, int nanos) {
- ((Date) timestamp).setTime(secondsAsMillis);
- timestamp.setNanos(nanos);
- }
-
- public void setInternal(long secondsAsMillis, int nanos) {
-
- // This is our way of documenting that we are MUTATING the contents of
- // this writable's internal timestamp.
- updateTimestamp(timestamp, secondsAsMillis, nanos);
-
- bytesEmpty = true;
- timestampEmpty = false;
- }
-
- private void clearTimestamp() {
- timestampEmpty = true;
- }
-
- public void writeToByteStream(RandomAccessOutput byteStream) {
- checkBytes();
- byteStream.write(currentBytes, offset, getTotalLength());
- }
-
- /**
- *
- * @return seconds corresponding to this TimestampWritable
- */
- public long getSeconds() {
- if (!timestampEmpty) {
- return TimestampUtils.millisToSeconds(timestamp.getTime());
- } else if (!bytesEmpty) {
- return TimestampWritable.getSeconds(currentBytes, offset);
- } else {
- throw new IllegalStateException("Both timestamp and bytes are empty");
- }
- }
-
- /**
- *
- * @return nanoseconds in this TimestampWritable
- */
- public int getNanos() {
- if (!timestampEmpty) {
- return timestamp.getNanos();
- } else if (!bytesEmpty) {
- return hasDecimalOrSecondVInt() ?
- TimestampWritable.getNanos(currentBytes, offset + 4) : 0;
- } else {
- throw new IllegalStateException("Both timestamp and bytes are empty");
- }
- }
-
- /**
- * @return length of serialized TimestampWritable data. As a side effect, populates the internal
- * byte array if empty.
- */
- int getTotalLength() {
- checkBytes();
- return getTotalLength(currentBytes, offset);
- }
-
- public static int getTotalLength(byte[] bytes, int offset) {
- int len = 4;
- if (hasDecimalOrSecondVInt(bytes[offset])) {
- int firstVIntLen = WritableUtils.decodeVIntSize(bytes[offset + 4]);
- len += firstVIntLen;
- if (hasSecondVInt(bytes[offset + 4])) {
- len += WritableUtils.decodeVIntSize(bytes[offset + 4 + firstVIntLen]);
- }
- }
- return len;
- }
-
- public Timestamp getTimestamp() {
- if (timestampEmpty) {
- populateTimestamp();
- }
- return timestamp;
- }
-
- /**
- * Used to create copies of objects
- * @return a copy of the internal TimestampWritable byte[]
- */
- public byte[] getBytes() {
- checkBytes();
-
- int len = getTotalLength();
- byte[] b = new byte[len];
-
- System.arraycopy(currentBytes, offset, b, 0, len);
- return b;
- }
-
- /**
- * @return byte[] representation of TimestampWritable that is binary
- * sortable (7 bytes for seconds, 4 bytes for nanoseconds)
- */
- public byte[] getBinarySortable() {
- byte[] b = new byte[BINARY_SORTABLE_LENGTH];
- int nanos = getNanos();
- // We flip the highest-order bit of the seven-byte representation of seconds to make negative
- // values come before positive ones.
- long seconds = getSeconds() ^ SEVEN_BYTE_LONG_SIGN_FLIP;
- sevenByteLongToBytes(seconds, b, 0);
- intToBytes(nanos, b, 7);
- return b;
- }
-
- /**
- * Given a byte[] that has binary sortable data, initialize the internal
- * structures to hold that data
- * @param bytes the byte array that holds the binary sortable representation
- * @param binSortOffset offset of the binary-sortable representation within the buffer.
- */
- public void setBinarySortable(byte[] bytes, int binSortOffset) {
- // Flip the sign bit (and unused bits of the high-order byte) of the seven-byte long back.
- long seconds = readSevenByteLong(bytes, binSortOffset) ^ SEVEN_BYTE_LONG_SIGN_FLIP;
- int nanos = bytesToInt(bytes, binSortOffset + 7);
- int firstInt = (int) seconds;
- boolean hasSecondVInt = seconds < 0 || seconds > Integer.MAX_VALUE;
- if (nanos != 0 || hasSecondVInt) {
- firstInt |= DECIMAL_OR_SECOND_VINT_FLAG;
- } else {
- firstInt &= LOWEST_31_BITS_OF_SEC_MASK;
- }
-
- intToBytes(firstInt, internalBytes, 0);
- setNanosBytes(nanos, internalBytes, 4, hasSecondVInt);
- if (hasSecondVInt) {
- LazyBinaryUtils.writeVLongToByteArray(internalBytes,
- 4 + WritableUtils.decodeVIntSize(internalBytes[4]),
- seconds >> 31);
- }
-
- currentBytes = internalBytes;
- this.offset = 0;
- }
-
- /**
- * The data of TimestampWritable can be stored either in a byte[]
- * or in a Timestamp object. Calling this method ensures that the byte[]
- * is populated from the Timestamp object if previously empty.
- */
- private void checkBytes() {
- if (bytesEmpty) {
- // Populate byte[] from Timestamp
- convertTimestampToBytes(timestamp, internalBytes, 0);
- offset = 0;
- currentBytes = internalBytes;
- bytesEmpty = false;
+ if (t != null) {
+ Preconditions.checkArgument(t.getClass().equals(Timestamp.class));
}
- }
-
- /**
- *
- * @return double representation of the timestamp, accurate to nanoseconds
- */
- public double getDouble() {
- double seconds, nanos;
- if (bytesEmpty) {
- seconds = TimestampUtils.millisToSeconds(timestamp.getTime());
- nanos = timestamp.getNanos();
- } else {
- seconds = getSeconds();
- nanos = getNanos();
- }
- return seconds + nanos / 1000000000;
- }
-
- public static long getLong(Timestamp timestamp) {
- return timestamp.getTime() / 1000;
- }
-
- public void readFields(DataInput in) throws IOException {
- in.readFully(internalBytes, 0, 4);
- if (TimestampWritable.hasDecimalOrSecondVInt(internalBytes[0])) {
- in.readFully(internalBytes, 4, 1);
- int len = (byte) WritableUtils.decodeVIntSize(internalBytes[4]);
- if (len > 1) {
- in.readFully(internalBytes, 5, len-1);
- }
-
- long vlong = LazyBinaryUtils.readVLongFromByteArray(internalBytes, 4);
- if (vlong < -1000000000 || vlong > 999999999) {
- throw new IOException(
- "Invalid first vint value (encoded nanoseconds) of a TimestampWritable: " + vlong +
- ", expected to be between -1000000000 and 999999999.");
- // Note that -1000000000 is a valid value corresponding to a nanosecond timestamp
- // of 999999999, because if the second VInt is present, we use the value
- // (-reversedNanoseconds - 1) as the second VInt.
- }
- if (vlong < 0) {
- // This indicates there is a second VInt containing the additional bits of the seconds
- // field.
- in.readFully(internalBytes, 4 + len, 1);
- int secondVIntLen = (byte) WritableUtils.decodeVIntSize(internalBytes[4 + len]);
- if (secondVIntLen > 1) {
- in.readFully(internalBytes, 5 + len, secondVIntLen - 1);
- }
- }
- }
- currentBytes = internalBytes;
- this.offset = 0;
- }
-
- public void write(DataOutput out) throws IOException {
- checkBytes();
- out.write(currentBytes, offset, getTotalLength());
- }
-
- public int compareTo(TimestampWritable t) {
- checkBytes();
- long s1 = this.getSeconds();
- long s2 = t.getSeconds();
- if (s1 == s2) {
- int n1 = this.getNanos();
- int n2 = t.getNanos();
- if (n1 == n2) {
- return 0;
- }
- return n1 - n2;
- } else {
- return s1 < s2 ? -1 : 1;
- }
- }
-
- @Override
- public boolean equals(Object o) {
- return compareTo((TimestampWritable) o) == 0;
+ super.set(t);
}
@Override
- public String toString() {
- if (timestampEmpty) {
- populateTimestamp();
- }
-
- String timestampString = timestamp.toString();
- if (timestampString.length() > 19) {
- if (timestampString.length() == 21) {
- if (timestampString.substring(19).compareTo(".0") == 0) {
- return threadLocalDateFormat.get().format(timestamp);
- }
- }
- return threadLocalDateFormat.get().format(timestamp) + timestampString.substring(19);
- }
-
- return threadLocalDateFormat.get().format(timestamp);
+ public int maxNumBytes() {
+ return 13;
}
@Override
- public int hashCode() {
- long seconds = getSeconds();
- seconds <<= 30; // the nanosecond part fits in 30 bits
- seconds |= getNanos();
- return (int) ((seconds >>> 32) ^ seconds);
- }
-
- private void populateTimestamp() {
- long seconds = getSeconds();
- int nanos = getNanos();
- timestamp.setTime(seconds * 1000);
- timestamp.setNanos(nanos);
- }
-
- /** Static methods **/
-
- /**
- * Gets seconds stored as integer at bytes[offset]
- * @param bytes
- * @param offset
- * @return the number of seconds
- */
- public static long getSeconds(byte[] bytes, int offset) {
- int lowest31BitsOfSecondsAndFlag = bytesToInt(bytes, offset);
- if (lowest31BitsOfSecondsAndFlag >= 0 || // the "has decimal or second VInt" flag is not set
- !hasSecondVInt(bytes[offset + 4])) {
- // The entire seconds field is stored in the first 4 bytes.
- return lowest31BitsOfSecondsAndFlag & LOWEST_31_BITS_OF_SEC_MASK;
- }
-
- // We compose the seconds field from two parts. The lowest 31 bits come from the first four
- // bytes. The higher-order bits come from the second VInt that follows the nanos field.
- return ((long) (lowest31BitsOfSecondsAndFlag & LOWEST_31_BITS_OF_SEC_MASK)) |
- (LazyBinaryUtils.readVLongFromByteArray(bytes,
- offset + 4 + WritableUtils.decodeVIntSize(bytes[offset + 4])) << 31);
- }
-
- public static int getNanos(byte[] bytes, int offset) {
- VInt vInt = LazyBinaryUtils.threadLocalVInt.get();
- LazyBinaryUtils.readVInt(bytes, offset, vInt);
- int val = vInt.value;
- if (val < 0) {
- // This means there is a second VInt present that specifies additional bits of the timestamp.
- // The reversed nanoseconds value is still encoded in this VInt.
- val = -val - 1;
- }
- int len = (int) Math.floor(Math.log10(val)) + 1;
-
- // Reverse the value
- int tmp = 0;
- while (val != 0) {
- tmp *= 10;
- tmp += val % 10;
- val /= 10;
- }
- val = tmp;
-
- if (len < 9) {
- val *= Math.pow(10, 9 - len);
- }
- return val;
- }
-
- /**
- * Writes a Timestamp's serialized value to byte array b at the given offset
- * @param t to convert to bytes
- * @param b destination byte array
- * @param offset destination offset in the byte array
- */
- public static void convertTimestampToBytes(Timestamp t, byte[] b,
- int offset) {
- long millis = t.getTime();
- int nanos = t.getNanos();
-
- long seconds = TimestampUtils.millisToSeconds(millis);
- boolean hasSecondVInt = seconds < 0 || seconds > Integer.MAX_VALUE;
- boolean hasDecimal = setNanosBytes(nanos, b, offset+4, hasSecondVInt);
-
- int firstInt = (int) seconds;
- if (hasDecimal || hasSecondVInt) {
- firstInt |= DECIMAL_OR_SECOND_VINT_FLAG;
- } else {
- firstInt &= LOWEST_31_BITS_OF_SEC_MASK;
- }
- intToBytes(firstInt, b, offset);
-
- if (hasSecondVInt) {
- LazyBinaryUtils.writeVLongToByteArray(b,
- offset + 4 + WritableUtils.decodeVIntSize(b[offset + 4]),
- seconds >> 31);
- }
+ public int binSortableLen() {
+ return 11;
}
/**
- * Given an integer representing nanoseconds, write its serialized
- * value to the byte array b at offset
- *
- * @param nanos
- * @param b
- * @param offset
- * @return
- */
- private static boolean setNanosBytes(int nanos, byte[] b, int offset, boolean hasSecondVInt) {
- int decimal = 0;
- if (nanos != 0) {
- int counter = 0;
- while (counter < 9) {
- decimal *= 10;
- decimal += nanos % 10;
- nanos /= 10;
- counter++;
- }
- }
-
- if (hasSecondVInt || decimal != 0) {
- // We use the sign of the reversed-nanoseconds field to indicate that there is a second VInt
- // present.
- LazyBinaryUtils.writeVLongToByteArray(b, offset, hasSecondVInt ? (-decimal - 1) : decimal);
- }
- return decimal != 0;
- }
-
- public HiveDecimal getHiveDecimal() {
- if (timestampEmpty) {
- populateTimestamp();
- }
- return getHiveDecimal(timestamp);
- }
-
- public static HiveDecimal getHiveDecimal(Timestamp timestamp) {
- // The BigDecimal class recommends not converting directly from double to BigDecimal,
- // so we convert through a string...
- Double timestampDouble = TimestampUtils.getDouble(timestamp);
- HiveDecimal result = HiveDecimal.create(timestampDouble.toString());
- return result;
- }
-
-
- /**
* Converts the time in seconds or milliseconds to a timestamp.
* @param time time in seconds or in milliseconds
* @return the timestamp
*/
public static Timestamp longToTimestamp(long time, boolean intToTimestampInSeconds) {
- // If the time is in seconds, converts it to milliseconds first.
- return new Timestamp(intToTimestampInSeconds ? time * 1000 : time);
- }
-
- public static void setTimestamp(Timestamp t, byte[] bytes, int offset) {
- long seconds = getSeconds(bytes, offset);
- t.setTime(seconds * 1000);
- if (hasDecimalOrSecondVInt(bytes[offset])) {
- t.setNanos(getNanos(bytes, offset + 4));
- } else {
- t.setNanos(0);
- }
+ // If the time is in seconds, converts it to milliseconds first.
+ return new Timestamp(intToTimestampInSeconds ? time * 1000 : time);
}
public static Timestamp createTimestamp(byte[] bytes, int offset) {
Timestamp t = new Timestamp(0);
- TimestampWritable.setTimestamp(t, bytes, offset);
+ TimestampWritableBase.setTimestamp(t, bytes, offset);
return t;
}
- private static boolean hasDecimalOrSecondVInt(byte b) {
- return (b >> 7) != 0;
- }
-
- private static boolean hasSecondVInt(byte b) {
- return WritableUtils.isNegativeVInt(b);
- }
-
- private final boolean hasDecimalOrSecondVInt() {
- return hasDecimalOrSecondVInt(currentBytes[offset]);
- }
-
- public final boolean hasDecimal() {
- return hasDecimalOrSecondVInt() || currentBytes[offset + 4] != -1;
- // If the first byte of the VInt is -1, the VInt itself is -1, indicating that there is a
- // second VInt but the nanoseconds field is actually 0.
- }
-
- /**
- * Writes value into dest at offset
- * @param value
- * @param dest
- * @param offset
- */
- private static void intToBytes(int value, byte[] dest, int offset) {
- dest[offset] = (byte) ((value >> 24) & 0xFF);
- dest[offset+1] = (byte) ((value >> 16) & 0xFF);
- dest[offset+2] = (byte) ((value >> 8) & 0xFF);
- dest[offset+3] = (byte) (value & 0xFF);
- }
-
- /**
- * Writes value into dest at offset as a seven-byte
- * serialized long number.
- */
- static void sevenByteLongToBytes(long value, byte[] dest, int offset) {
- dest[offset] = (byte) ((value >> 48) & 0xFF);
- dest[offset+1] = (byte) ((value >> 40) & 0xFF);
- dest[offset+2] = (byte) ((value >> 32) & 0xFF);
- dest[offset+3] = (byte) ((value >> 24) & 0xFF);
- dest[offset+4] = (byte) ((value >> 16) & 0xFF);
- dest[offset+5] = (byte) ((value >> 8) & 0xFF);
- dest[offset+6] = (byte) (value & 0xFF);
- }
-
- /**
- *
- * @param bytes
- * @param offset
- * @return integer represented by the four bytes in bytes
- * beginning at offset
- */
- private static int bytesToInt(byte[] bytes, int offset) {
- return ((0xFF & bytes[offset]) << 24)
- | ((0xFF & bytes[offset+1]) << 16)
- | ((0xFF & bytes[offset+2]) << 8)
- | (0xFF & bytes[offset+3]);
- }
-
- static long readSevenByteLong(byte[] bytes, int offset) {
- // We need to shift everything 8 bits left and then shift back to populate the sign field.
- return (((0xFFL & bytes[offset]) << 56)
- | ((0xFFL & bytes[offset+1]) << 48)
- | ((0xFFL & bytes[offset+2]) << 40)
- | ((0xFFL & bytes[offset+3]) << 32)
- | ((0xFFL & bytes[offset+4]) << 24)
- | ((0xFFL & bytes[offset+5]) << 16)
- | ((0xFFL & bytes[offset+6]) << 8)) >> 8;
+ public Timestamp getTimestamp() {
+ if (timestampEmpty) {
+ populateTimestamp();
+ }
+ return timestamp;
}
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritableBase.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritableBase.java
new file mode 100644
index 0000000..4b75f38
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritableBase.java
@@ -0,0 +1,667 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.io;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.sql.Timestamp;
+import java.util.Date;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveTimestamp;
+import org.apache.hadoop.hive.ql.util.TimestampUtils;
+import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
+import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
+import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableUtils;
+
+/**
+ * TimestampWritableBase
+ * Writable equivalent of java.sql.Timestamp
+ *
+ * Timestamps are of the format
+ * YYYY-MM-DD HH:MM:SS.[fff...]
+ *
+ * We encode Unix timestamp in seconds in 4 bytes, using the MSB to signify
+ * whether the timestamp has a fractional portion.
+ *
+ * The fractional portion is reversed, and encoded as a VInt
+ * so timestamps with less precision use fewer bytes.
+ *
+ * 0.1 -> 1
+ * 0.01 -> 10
+ * 0.001 -> 100
+ *
+ */
+public abstract class TimestampWritableBase implements WritableComparable<TimestampWritableBase> {
+
+ static final public byte[] nullBytes = {0x0, 0x0, 0x0, 0x0};
+
+ protected static final int DECIMAL_OR_SECOND_VINT_FLAG = 0x80000000;
+ private static final int LOWEST_31_BITS_OF_SEC_MASK = 0x7fffffff;
+
+ private static final long SEVEN_BYTE_LONG_SIGN_FLIP = 0xff80L << 48;
+
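+ // Second MSB of the reversed-nanoseconds VInt; marks a trailing timezone offset.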
+ private static final int TIMEZONE_MASK = 1 << 30;
+
+
+ /** The maximum number of bytes required for a TimestampWritableBase */
+ public abstract int maxNumBytes();
+
+ public abstract int binSortableLen();
+
+ protected Timestamp timestamp;
+
+ /**
+ * true if data is stored in timestamp field rather than byte arrays.
+ * allows for lazy conversion to bytes when necessary
+ * false otherwise
+ */
+ protected boolean bytesEmpty;
+ protected boolean timestampEmpty;
+
+ /* Allow use of external byte[] for efficiency */
+ private byte[] currentBytes;
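+ // maxNumBytes() is called during base-class construction, so subclasses
+ // must implement it without relying on instance state.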
+ private final byte[] internalBytes = new byte[maxNumBytes()];
+ private byte[] externalBytes;
+ private int offset;
+
+ /* Constructors */
+ protected TimestampWritableBase() {
+ bytesEmpty = false;
+ currentBytes = internalBytes;
+ offset = 0;
+
+ clearTimestamp();
+ }
+
+ public void set(byte[] bytes, int offset) {
+ externalBytes = bytes;
+ this.offset = offset;
+ bytesEmpty = false;
+ currentBytes = externalBytes;
+
+ clearTimestamp();
+ }
+
+ public void setTime(long time) {
+ timestamp.setTime(time);
+ bytesEmpty = true;
+ timestampEmpty = false;
+ }
+
+ public void set(Timestamp t) {
+ if (t == null) {
+ timestamp.setTime(0);
+ timestamp.setNanos(0);
+ if (timestamp instanceof HiveTimestamp) {
+ ((HiveTimestamp) timestamp).setOffsetInMin(0);
+ }
+ return;
+ }
+ this.timestamp = t;
+ bytesEmpty = true;
+ timestampEmpty = false;
+ }
+
+ public void set(TimestampWritableBase t) {
+ t.checkBytes();
+ if (t.currentBytes == t.externalBytes) {
+ set(t.currentBytes, t.offset);
+ } else {
+ set(t.currentBytes, 0);
+ }
+ }
+
+ private static void updateTimestamp(Timestamp timestamp, long secondsAsMillis, int nanos) {
+ ((Date) timestamp).setTime(secondsAsMillis);
+ timestamp.setNanos(nanos);
+ }
+
+ public void setInternal(long secondsAsMillis, int nanos) {
+
+ // This is our way of documenting that we are MUTATING the contents of
+ // this writable's internal timestamp.
+ updateTimestamp(timestamp, secondsAsMillis, nanos);
+
+ bytesEmpty = true;
+ timestampEmpty = false;
+ }
+
+ private void clearTimestamp() {
+ timestampEmpty = true;
+ }
+
+ public void writeToByteStream(RandomAccessOutput byteStream) {
+ checkBytes();
+ byteStream.write(currentBytes, offset, getTotalLength());
+ }
+
+ /**
+ *
+ * @return seconds corresponding to this TimestampWritableBase
+ */
+ public long getSeconds() {
+ if (!timestampEmpty) {
+ return TimestampUtils.millisToSeconds(timestamp.getTime());
+ } else if (!bytesEmpty) {
+ return getSeconds(currentBytes, offset);
+ } else {
+ throw new IllegalStateException("Both timestamp and bytes are empty");
+ }
+ }
+
+ /**
+ *
+ * @return nanoseconds in this TimestampWritableBase
+ */
+ public int getNanos() {
+ if (!timestampEmpty) {
+ return timestamp.getNanos();
+ } else if (!bytesEmpty) {
+ return hasDecimalOrSecondVInt() ?
+ getNanos(currentBytes, offset + 4) : 0;
+ } else {
+ throw new IllegalStateException("Both timestamp and bytes are empty");
+ }
+ }
+
+ protected Integer getTimezoneOffset() {
+ if (!timestampEmpty) {
+ return timestamp instanceof HiveTimestamp ?
+ ((HiveTimestamp) timestamp).getOffsetInMin() : null;
+ } else if (!bytesEmpty) {
+ return hasDecimalOrSecondVInt() ? getTimezoneOffset(currentBytes, offset + 4) : null;
+ } else {
+ throw new IllegalStateException("Both timestamp and bytes are empty");
+ }
+ }
+
+ // offset should point to the start of the decimal field
+ private static Integer getTimezoneOffset(byte[] bytes, final int offset) {
+ if (hasTimezoneOffset(bytes, offset)) {
+ int pos = offset + WritableUtils.decodeVIntSize(bytes[offset]);
+ // skip the 2nd VInt
+ if (hasSecondVInt(bytes[offset])) {
+ pos += WritableUtils.decodeVIntSize(bytes[pos]);
+ }
+ return readVInt(bytes, pos);
+ }
+ return null;
+ }
+
+ private static boolean hasTimezoneOffset(byte[] bytes, int offset) {
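+ // Mirrors setNanosBytes: for a non-negative VInt the flag is a set second
+ // MSB; for a negative VInt it is a cleared second MSB.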
+ int val = readVInt(bytes, offset);
+ return (val >= 0 && (val & TIMEZONE_MASK) != 0) ||
+ (val < 0 && (val & TIMEZONE_MASK) == 0);
+ }
+
+ /**
+ * @return length of serialized TimestampWritableBase data. As a side effect, populates the internal
+ * byte array if empty.
+ */
+ int getTotalLength() {
+ checkBytes();
+ return getTotalLength(currentBytes, offset);
+ }
+
+ public static int getTotalLength(byte[] bytes, int offset) {
+ int pos = offset + 4;
+ if (hasDecimalOrSecondVInt(bytes[offset])) {
+ boolean hasSecondVInt = hasSecondVInt(bytes[pos]);
+ boolean hasTimezoneOffset = hasTimezoneOffset(bytes, pos);
+ pos += WritableUtils.decodeVIntSize(bytes[pos]);
+ if (hasSecondVInt) {
+ pos += WritableUtils.decodeVIntSize(bytes[pos]);
+ }
+ if (hasTimezoneOffset) {
+ pos += WritableUtils.decodeVIntSize(bytes[pos]);
+ }
+ }
+ return pos - offset;
+ }
+
+ /**
+ * Used to create copies of objects
+ * @return a copy of the internal TimestampWritableBase byte[]
+ */
+ public byte[] getBytes() {
+ checkBytes();
+
+ int len = getTotalLength();
+ byte[] b = new byte[len];
+
+ System.arraycopy(currentBytes, offset, b, 0, len);
+ return b;
+ }
+
+ /**
+ * @return byte[] representation of TimestampWritableBase that is binary
+ * sortable (7 bytes for seconds, 4 bytes for nanoseconds, 4 bytes for timezone offset)
+ */
+ public byte[] getBinarySortable() {
+ byte[] b = new byte[binSortableLen()];
+ int nanos = getNanos();
+ // We flip the highest-order bit of the seven-byte representation of seconds to make negative
+ // values come before positive ones.
+ long seconds = getSeconds() ^ SEVEN_BYTE_LONG_SIGN_FLIP;
+ sevenByteLongToBytes(seconds, b, 0);
+ intToBytes(nanos, b, 7);
+ return b;
+ }
+
+ /**
+ * Given a byte[] that has binary sortable data, initialize the internal
+ * structures to hold that data
+ * @param bytes the byte array that holds the binary sortable representation
+ * @param binSortOffset offset of the binary-sortable representation within the buffer.
+ */
+ public void setBinarySortable(byte[] bytes, int binSortOffset) {
+ // Flip the sign bit (and unused bits of the high-order byte) of the seven-byte long back.
+ long seconds = readSevenByteLong(bytes, binSortOffset) ^ SEVEN_BYTE_LONG_SIGN_FLIP;
+ int nanos = bytesToInt(bytes, binSortOffset + 7);
+ timestamp.setTime(seconds * 1000);
+ timestamp.setNanos(nanos);
+ timestampEmpty = false;
+ bytesEmpty = true;
+ }
+
+ /**
+ * The data of TimestampWritableBase can be stored either in a byte[]
+ * or in a Timestamp object. Calling this method ensures that the byte[]
+ * is populated from the Timestamp object if previously empty.
+ */
+ private void checkBytes() {
+ if (bytesEmpty) {
+ // Populate byte[] from Timestamp
+ populateBytes();
+ offset = 0;
+ currentBytes = internalBytes;
+ bytesEmpty = false;
+ }
+ }
+
+ /**
+ *
+ * @return double representation of the timestamp, accurate to nanoseconds
+ */
+ public double getDouble() {
+ double seconds, nanos;
+ if (bytesEmpty) {
+ seconds = TimestampUtils.millisToSeconds(timestamp.getTime());
+ nanos = timestamp.getNanos();
+ } else {
+ seconds = getSeconds();
+ nanos = getNanos();
+ }
+ return seconds + nanos / 1000000000;
+ }
+
+ public static long getLong(Timestamp timestamp) {
+ return timestamp.getTime() / 1000;
+ }
+
+ public void readFields(DataInput in) throws IOException {
+ in.readFully(internalBytes, 0, 4);
+ if (TimestampWritableBase.hasDecimalOrSecondVInt(internalBytes[0])) {
+ in.readFully(internalBytes, 4, 1);
+ int len = (byte) WritableUtils.decodeVIntSize(internalBytes[4]);
+ if (len > 1) {
+ in.readFully(internalBytes, 5, len - 1);
+ }
+
+ int pos = 4 + len;
+ if (hasSecondVInt(internalBytes[4])) {
+ // This indicates there is a second VInt containing the additional bits of the seconds
+ // field.
+ in.readFully(internalBytes, pos, 1);
+ int secondVIntLen = (byte) WritableUtils.decodeVIntSize(internalBytes[pos]);
+ if (secondVIntLen > 1) {
+ in.readFully(internalBytes, pos + 1, secondVIntLen - 1);
+ }
+ pos += secondVIntLen;
+ }
+
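+ // Read the trailing timezone-offset VInt if the flag bit says one is present.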
+ if (hasTimezoneOffset(internalBytes, 4)) {
+ in.readFully(internalBytes, pos, 1);
+ int tzOffsetLen = WritableUtils.decodeVIntSize(internalBytes[pos]);
+ if (tzOffsetLen > 1) {
+ in.readFully(internalBytes, pos + 1, tzOffsetLen - 1);
+ }
+ }
+ }
+ currentBytes = internalBytes;
+ this.offset = 0;
+ }
+
+ public void write(DataOutput out) throws IOException {
+ checkBytes();
+ out.write(currentBytes, offset, getTotalLength());
+ }
+
+ @Override
+ public int compareTo(TimestampWritableBase t) {
+ checkBytes();
+ long s1 = this.getSeconds();
+ long s2 = t.getSeconds();
+ if (s1 == s2) {
+ int n1 = this.getNanos();
+ int n2 = t.getNanos();
+ if (n1 == n2) {
+ Integer tz1 = getTimezoneOffset();
+ Integer tz2 = t.getTimezoneOffset();
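+ // A timestamp with a timezone offset sorts after an otherwise equal one without.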
+ if (tz1 == null || tz2 == null) {
+ if (tz1 != null) {
+ return 1;
+ }
+ if (tz2 != null) {
+ return -1;
+ }
+ return 0;
+ }
+ return tz1 - tz2;
+ }
+ return n1 - n2;
+ } else {
+ return s1 < s2 ? -1 : 1;
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ return compareTo((TimestampWritableBase) o) == 0;
+ }
+
+ @Override
+ public String toString() {
+ if (timestampEmpty) {
+ populateTimestamp();
+ }
+
+ String timestampString = timestamp.toString();
+ if (timestampString.length() > 19) {
+ if (timestampString.substring(19, 21).compareTo(".0") == 0) {
+ if (timestampString.length() == 21 || !Character.isDigit(timestampString.charAt(21))) {
+ timestampString = timestampString.substring(0, 19) + timestampString.substring(21);
+ }
+ }
+ }
+
+ return timestampString;
+ }
+
+ @Override
+ public int hashCode() {
+ long seconds = getSeconds();
+ seconds <<= 30; // the nanosecond part fits in 30 bits
+ seconds |= getNanos();
+ Integer tzOffset = getTimezoneOffset();
+ int hash = (int) ((seconds >>> 32) ^ seconds);
+ if (tzOffset != null) {
+ hash ^= tzOffset;
+ }
+ return hash;
+ }
+
+ protected void populateTimestamp() {
+ long seconds = getSeconds();
+ int nanos = getNanos();
+ timestamp.setTime(seconds * 1000);
+ timestamp.setNanos(nanos);
+ Integer tzOffset = getTimezoneOffset();
+ if (timestamp instanceof HiveTimestamp) {
+ Preconditions.checkArgument(tzOffset != null);
+ ((HiveTimestamp) timestamp).setOffsetInMin(tzOffset);
+ } else {
+ Preconditions.checkArgument(tzOffset == null);
+ }
+ timestampEmpty = false;
+ }
+
+ /** Static methods **/
+
+ /**
+ * Gets seconds stored as integer at bytes[offset]
+ * @param bytes
+ * @param offset
+ * @return the number of seconds
+ */
+ public static long getSeconds(byte[] bytes, int offset) {
+ int lowest31BitsOfSecondsAndFlag = bytesToInt(bytes, offset);
+ if (lowest31BitsOfSecondsAndFlag >= 0 || // the "has decimal or second VInt" flag is not set
+ !hasSecondVInt(bytes[offset + 4])) {
+ // The entire seconds field is stored in the first 4 bytes.
+ return lowest31BitsOfSecondsAndFlag & LOWEST_31_BITS_OF_SEC_MASK;
+ }
+
+ // We compose the seconds field from two parts. The lowest 31 bits come from the first four
+ // bytes. The higher-order bits come from the second VInt that follows the nanos field.
+ return ((long) (lowest31BitsOfSecondsAndFlag & LOWEST_31_BITS_OF_SEC_MASK)) |
+ (LazyBinaryUtils.readVLongFromByteArray(bytes,
+ offset + 4 + WritableUtils.decodeVIntSize(bytes[offset + 4])) << 31);
+ }
+
+ public static int getNanos(byte[] bytes, int offset) {
+ int val = readVInt(bytes, offset);
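+ // Clear (non-negative) or restore (negative) the timezone flag bit before
+ // decoding the reversed digits.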
+ if (val < 0) {
+ val |= TIMEZONE_MASK;
+ // This means there is a second VInt present that specifies additional bits of the timestamp.
+ // The reversed nanoseconds value is still encoded in this VInt.
+ val = -val - 1;
+ } else {
+ val &= ~TIMEZONE_MASK;
+ }
+ int len = (int) Math.floor(Math.log10(val)) + 1;
+
+ // Reverse the value
+ int tmp = 0;
+ while (val != 0) {
+ tmp *= 10;
+ tmp += val % 10;
+ val /= 10;
+ }
+ val = tmp;
+
+ if (len < 9) {
+ val *= Math.pow(10, 9 - len);
+ }
+ return val;
+ }
+
+ private static int readVInt(byte[] bytes, int offset) {
+ VInt vInt = LazyBinaryUtils.threadLocalVInt.get();
+ LazyBinaryUtils.readVInt(bytes, offset, vInt);
+ return vInt.value;
+ }
+
+ /**
+ * Writes the Timestamp's serialized value to the internal byte array.
+ */
+ private void populateBytes() {
+ long millis = timestamp.getTime();
+ int nanos = timestamp.getNanos();
+
+ boolean hasTimezone = timestamp instanceof HiveTimestamp;
+ long seconds = TimestampUtils.millisToSeconds(millis);
+ boolean hasSecondVInt = seconds < 0 || seconds > Integer.MAX_VALUE;
+ int position = 4;
+ boolean hasDecimal = setNanosBytes(nanos, internalBytes, position, hasSecondVInt, hasTimezone);
+
+ int firstInt = (int) seconds;
+ if (hasDecimal || hasSecondVInt || hasTimezone) {
+ firstInt |= DECIMAL_OR_SECOND_VINT_FLAG;
+ } else {
+ firstInt &= LOWEST_31_BITS_OF_SEC_MASK;
+ }
+ intToBytes(firstInt, internalBytes, 0);
+
+ if (hasSecondVInt) {
+ position += WritableUtils.decodeVIntSize(internalBytes[position]);
+ LazyBinaryUtils.writeVLongToByteArray(internalBytes, position, seconds >> 31);
+ }
+
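+ // The timezone offset (in minutes) is written as the last VInt in the encoding.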
+ if (hasTimezone) {
+ position += WritableUtils.decodeVIntSize(internalBytes[position]);
+ LazyBinaryUtils.writeVLongToByteArray(internalBytes, position,
+ ((HiveTimestamp) timestamp).getOffsetInMin());
+ }
+ }
+
+ /**
+ * Given an integer representing nanoseconds, write its serialized
+ * value to the byte array b at offset
+ *
+ * @param nanos
+ * @param b
+ * @param offset
+ * @return
+ */
+ private static boolean setNanosBytes(int nanos, byte[] b, int offset,
+ boolean hasSecondVInt, boolean hasTimezone) {
+ int decimal = 0;
+ if (nanos != 0) {
+ int counter = 0;
+ while (counter < 9) {
+ decimal *= 10;
+ decimal += nanos % 10;
+ nanos /= 10;
+ counter++;
+ }
+ }
+
+ if (hasSecondVInt || decimal != 0 || hasTimezone) {
+ // We use the sign of the reversed-nanoseconds field to indicate that there is a second VInt
+ // present.
+ int toWrite = decimal;
+ if (hasSecondVInt) {
+ toWrite = -toWrite - 1;
+ }
+ // Decimal ranges in [-1000000000, 999999999]. Use the second MSB to indicate if
+ // timezone is present.
+ // if toWrite >= 0, second MSB is always 0, otherwise it's always 1
+ if (hasTimezone) {
+ if (toWrite >= 0) {
+ toWrite |= TIMEZONE_MASK;
+ } else {
+ toWrite &= ~TIMEZONE_MASK;
+ }
+ }
+ LazyBinaryUtils.writeVLongToByteArray(b, offset, toWrite);
+ }
+ return decimal != 0;
+ }
+
+ public HiveDecimal getHiveDecimal() {
+ if (timestampEmpty) {
+ populateTimestamp();
+ }
+ return getHiveDecimal(timestamp);
+ }
+
+ public static HiveDecimal getHiveDecimal(Timestamp timestamp) {
+ // The BigDecimal class recommends not converting directly from double to BigDecimal,
+ // so we convert through a string...
+ Double timestampDouble = TimestampUtils.getDouble(timestamp);
+ HiveDecimal result = HiveDecimal.create(timestampDouble.toString());
+ return result;
+ }
+
+ public static void setTimestamp(Timestamp t, byte[] bytes, int offset) {
+ long seconds = getSeconds(bytes, offset);
+ t.setTime(seconds * 1000);
+ if (hasDecimalOrSecondVInt(bytes[offset])) {
+ t.setNanos(getNanos(bytes, offset + 4));
+ Integer tzOffset = getTimezoneOffset(bytes, offset + 4);
+ if (t instanceof HiveTimestamp) {
+ Preconditions.checkArgument(tzOffset != null);
+ ((HiveTimestamp) t).setOffsetInMin(tzOffset);
+ } else {
+ Preconditions.checkArgument(tzOffset == null);
+ }
+ } else {
+ t.setNanos(0);
+ }
+ }
+
+ private static boolean hasDecimalOrSecondVInt(byte b) {
+ return (b >> 7) != 0;
+ }
+
+ private static boolean hasSecondVInt(byte b) {
+ return WritableUtils.isNegativeVInt(b);
+ }
+
+ private final boolean hasDecimalOrSecondVInt() {
+ return hasDecimalOrSecondVInt(currentBytes[offset]);
+ }
+
+ /**
+ * Writes value into dest at offset
+ * @param value
+ * @param dest
+ * @param offset
+ */
+ protected static void intToBytes(int value, byte[] dest, int offset) {
+ dest[offset] = (byte) ((value >> 24) & 0xFF);
+ dest[offset+1] = (byte) ((value >> 16) & 0xFF);
+ dest[offset+2] = (byte) ((value >> 8) & 0xFF);
+ dest[offset+3] = (byte) (value & 0xFF);
+ }
+
+ /**
+ * Writes value into dest at offset as a seven-byte
+ * serialized long number.
+ */
+ static void sevenByteLongToBytes(long value, byte[] dest, int offset) {
+ dest[offset] = (byte) ((value >> 48) & 0xFF);
+ dest[offset+1] = (byte) ((value >> 40) & 0xFF);
+ dest[offset+2] = (byte) ((value >> 32) & 0xFF);
+ dest[offset+3] = (byte) ((value >> 24) & 0xFF);
+ dest[offset+4] = (byte) ((value >> 16) & 0xFF);
+ dest[offset+5] = (byte) ((value >> 8) & 0xFF);
+ dest[offset+6] = (byte) (value & 0xFF);
+ }
+
+ /**
+ *
+ * @param bytes
+ * @param offset
+ * @return integer represented by the four bytes in bytes
+ * beginning at offset
+ */
+ protected static int bytesToInt(byte[] bytes, int offset) {
+ return ((0xFF & bytes[offset]) << 24)
+ | ((0xFF & bytes[offset+1]) << 16)
+ | ((0xFF & bytes[offset+2]) << 8)
+ | (0xFF & bytes[offset+3]);
+ }
+
+ static long readSevenByteLong(byte[] bytes, int offset) {
+ // We need to shift everything 8 bits left and then shift back to populate the sign field.
+ return (((0xFFL & bytes[offset]) << 56)
+ | ((0xFFL & bytes[offset+1]) << 48)
+ | ((0xFFL & bytes[offset+2]) << 40)
+ | ((0xFFL & bytes[offset+3]) << 32)
+ | ((0xFFL & bytes[offset+4]) << 24)
+ | ((0xFFL & bytes[offset+5]) << 16)
+ | ((0xFFL & bytes[offset+6]) << 8)) >> 8;
+ }
+}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
index 23dbe6a..98807fb 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
@@ -37,6 +37,7 @@
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveIntervalYearMonthObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveIntervalDayTimeObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveTimestampObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveVarcharObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyIntObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyLongObjectInspector;
@@ -128,6 +129,8 @@
return new LazyDate((LazyDateObjectInspector) oi);
case TIMESTAMP:
return new LazyTimestamp((LazyTimestampObjectInspector) oi);
+ case TIMESTAMPTZ:
+ return new LazyHiveTimestamp((LazyHiveTimestampObjectInspector) oi);
case INTERVAL_YEAR_MONTH:
return new LazyHiveIntervalYearMonth((LazyHiveIntervalYearMonthObjectInspector) oi);
case INTERVAL_DAY_TIME:
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveTimestamp.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveTimestamp.java
new file mode 100644
index 0000000..e677561
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveTimestamp.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazy;
+
+import org.apache.hadoop.hive.common.type.HiveTimestamp;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableBase;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveTimestampObjectInspector;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.UnsupportedEncodingException;
+
+public class LazyHiveTimestamp
+ extends LazyPrimitive<LazyHiveTimestampObjectInspector, HiveTimestampWritable> {
+
+ private static final Logger LOG = LoggerFactory.getLogger(LazyHiveTimestamp.class);
+
+ public LazyHiveTimestamp(LazyHiveTimestampObjectInspector oi) {
+ super(oi);
+ data = new HiveTimestampWritable();
+ }
+
+ public LazyHiveTimestamp(LazyHiveTimestamp copy) {
+ super(copy);
+ data = new HiveTimestampWritable(copy.data);
+ }
+
+ @Override
+ public void init(ByteArrayRef bytes, int start, int length) {
+ if (!LazyUtils.isDateMaybe(bytes.getData(), start, length)) {
+ isNull = true;
+ return;
+ }
+
+ String s;
+ HiveTimestamp t = null;
+ try {
+ s = new String(bytes.getData(), start, length, "US-ASCII");
+ if (s.equals("NULL")) {
+ isNull = true;
+ logExceptionMessage(bytes, start, length, "TIMESTAMPTZ");
+ } else {
+ t = HiveTimestamp.valueOf(s);
+ }
+ } catch (UnsupportedEncodingException e) {
+ isNull = true;
+ LOG.error("Unsupported encoding found ", e);
+ } catch (IllegalArgumentException e) {
+ isNull = true;
+ logExceptionMessage(bytes, start, length, "TIMESTAMPTZ");
+ }
+ data.set(t);
+ }
+
+ @Override
+ public HiveTimestampWritable getWritableObject() {
+ return data;
+ }
+
+ public static void writeUTF8(OutputStream out, HiveTimestampWritable i) throws IOException {
+ if (i == null) {
+ // Serialize as time 0
+ out.write(TimestampWritableBase.nullBytes);
+ } else {
+ out.write(i.toString().getBytes("US-ASCII"));
+ }
+ }
+}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
index 56945d1..d5a9c3b 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
@@ -22,6 +22,7 @@
import java.io.UnsupportedEncodingException;
import java.sql.Timestamp;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableBase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -98,7 +99,7 @@ public static void writeUTF8(OutputStream out, TimestampWritable i)
throws IOException {
if (i == null) {
// Serialize as time 0
- out.write(TimestampWritable.nullBytes);
+ out.write(TimestampWritableBase.nullBytes);
} else {
out.write(i.toString().getBytes("US-ASCII"));
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
index 73c72e1..b2bba71 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
@@ -41,6 +41,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveTimestampObjectorInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
@@ -229,7 +230,9 @@ public static void writePrimitiveUTF8(OutputStream out, Object o,
PrimitiveObjectInspector oi, boolean escaped, byte escapeChar,
boolean[] needsEscape) throws IOException {
- switch (oi.getPrimitiveCategory()) {
+ PrimitiveObjectInspector.PrimitiveCategory category = oi.getPrimitiveCategory();
+
+ switch (category) {
case BOOLEAN: {
boolean b = ((BooleanObjectInspector) oi).get(o);
if (b) {
@@ -305,6 +308,11 @@ public static void writePrimitiveUTF8(OutputStream out, Object o,
((TimestampObjectInspector) oi).getPrimitiveWritableObject(o));
break;
}
+ case TIMESTAMPTZ: {
+ LazyHiveTimestamp.writeUTF8(out,
+ ((HiveTimestampObjectorInspector) oi).getPrimitiveWritableObject(o));
+ break;
+ }
case INTERVAL_YEAR_MONTH: {
LazyHiveIntervalYearMonth.writeUTF8(out,
((HiveIntervalYearMonthObjectInspector) oi).getPrimitiveWritableObject(o));
@@ -322,7 +330,7 @@ public static void writePrimitiveUTF8(OutputStream out, Object o,
break;
}
default: {
- throw new RuntimeException("Hive internal error.");
+ throw new RuntimeException("Unknown type: " + category);
}
}
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveTimestampObjectInspector.java
new file mode 100644
index 0000000..d10cf7d
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveTimestampObjectInspector.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.common.type.HiveTimestamp;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
+import org.apache.hadoop.hive.serde2.lazy.LazyHiveTimestamp;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveTimestampObjectorInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+public class LazyHiveTimestampObjectInspector
+ extends AbstractPrimitiveLazyObjectInspector<HiveTimestampWritable>
+ implements HiveTimestampObjectorInspector {
+
+ public LazyHiveTimestampObjectInspector() {
+ super(TypeInfoFactory.timestamptzTypeInfo);
+ }
+
+ @Override
+ public Object copyObject(Object o) {
+ return o == null ? null : new LazyHiveTimestamp((LazyHiveTimestamp) o);
+ }
+
+ @Override
+ public HiveTimestamp getPrimitiveJavaObject(Object o) {
+ return o == null ? null : ((LazyHiveTimestamp) o).getWritableObject().getTimestamp();
+ }
+
+}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
index 5601734..10e63a1 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
@@ -71,6 +71,8 @@
new LazyDateObjectInspector();
public static final LazyTimestampObjectInspector LAZY_TIMESTAMP_OBJECT_INSPECTOR =
new LazyTimestampObjectInspector();
+ public static final LazyHiveTimestampObjectInspector LAZY_HIVE_TIMESTAMP_OBJECT_INSPECTOR =
+ new LazyHiveTimestampObjectInspector();
public static final LazyHiveIntervalYearMonthObjectInspector LAZY_INTERVAL_YEAR_MONTH_OBJECT_INSPECTOR =
new LazyHiveIntervalYearMonthObjectInspector();
public static final LazyHiveIntervalDayTimeObjectInspector LAZY_INTERVAL_DAY_TIME_OBJECT_INSPECTOR =
@@ -111,6 +113,8 @@ private LazyPrimitiveObjectInspectorFactory() {
LAZY_DATE_OBJECT_INSPECTOR);
cachedPrimitiveLazyObjectInspectors.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME),
LAZY_TIMESTAMP_OBJECT_INSPECTOR);
+ cachedPrimitiveLazyObjectInspectors.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TIMESTAMPTZ_TYPE_NAME),
+ LAZY_HIVE_TIMESTAMP_OBJECT_INSPECTOR);
cachedPrimitiveLazyObjectInspectors.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME),
LAZY_INTERVAL_YEAR_MONTH_OBJECT_INSPECTOR);
cachedPrimitiveLazyObjectInspectors.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME),
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java
index 52f3527..2f14b0e 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java
@@ -31,6 +31,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveCharObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveIntervalDayTimeObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveIntervalYearMonthObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveTimestampObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector;
@@ -86,6 +87,8 @@
return new LazyBinaryDate((WritableDateObjectInspector) oi);
case TIMESTAMP:
return new LazyBinaryTimestamp((WritableTimestampObjectInspector) oi);
+ case TIMESTAMPTZ:
+ return new LazyBinaryHiveTimestamp((WritableHiveTimestampObjectInspector) oi);
case INTERVAL_YEAR_MONTH:
return new LazyBinaryHiveIntervalYearMonth((WritableHiveIntervalYearMonthObjectInspector) oi);
case INTERVAL_DAY_TIME:
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveTimestamp.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveTimestamp.java
new file mode 100644
index 0000000..3e5ed3a
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveTimestamp.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazybinary;
+
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveTimestampObjectInspector;
+
+/**
+ * A LazyBinaryObject that encodes HiveTimestamp
+ */
+public class LazyBinaryHiveTimestamp extends
+ LazyBinaryPrimitive<WritableHiveTimestampObjectInspector, HiveTimestampWritable> {
+
+ public LazyBinaryHiveTimestamp(WritableHiveTimestampObjectInspector oi) {
+ super(oi);
+ data = new HiveTimestampWritable();
+ }
+
+ @Override
+ public void init(ByteArrayRef bytes, int start, int length) {
+ data.set(bytes.getData(), start);
+ }
+}
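A sketch of the binary round trip these lazy-binary pieces assume (ByteStream.Output is the usual RandomAccessOutput implementation; the encoding itself lives in HiveTimestampWritable and is self-delimiting, as LazyBinaryUtils below relies on):

    ByteStream.Output out = new ByteStream.Output();
    HiveTimestampWritable w = new HiveTimestampWritable(new HiveTimestamp(0L, 480));
    w.writeToByteStream(out);        // write path, as in LazyBinarySerDe.serialize
    HiveTimestampWritable r = new HiveTimestampWritable();
    r.set(out.getData(), 0);         // read path, as in LazyBinaryHiveTimestamp.init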
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
index 54bfd2d..70f4099 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
@@ -24,6 +24,8 @@
import java.util.Map;
import java.util.Properties;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveTimestampObjectorInspector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
@@ -460,6 +462,12 @@ public static void serialize(RandomAccessOutput byteStream, Object obj,
t.writeToByteStream(byteStream);
return;
}
+ case TIMESTAMPTZ: {
+ HiveTimestampWritable t = ((HiveTimestampObjectorInspector) poi).
+ getPrimitiveWritableObject(obj);
+ t.writeToByteStream(byteStream);
+ return;
+ }
case INTERVAL_YEAR_MONTH: {
HiveIntervalYearMonthWritable intervalYearMonth =
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
index f8a110d..897fafa 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
@@ -23,6 +23,7 @@
import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableBase;
import org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -201,8 +202,9 @@ public static void checkObjectByteInfo(ObjectInspector objectInspector,
recordInfo.elementSize = WritableUtils.decodeVIntSize(bytes[offset]);
break;
case TIMESTAMP:
+ case TIMESTAMPTZ:
recordInfo.elementOffset = 0;
- recordInfo.elementSize = TimestampWritable.getTotalLength(bytes, offset);
+ recordInfo.elementSize = TimestampWritableBase.getTotalLength(bytes, offset);
break;
case INTERVAL_YEAR_MONTH:
recordInfo.elementOffset = 0;
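Since TIMESTAMP and TIMESTAMPTZ now share the length computation, sizing either serialized field is uniform; a sketch with hypothetical bytes/offset:

    int len = TimestampWritableBase.getTotalLength(bytes, offset); // bytes occupied by either encoding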
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
index 24b3d4e..c6a3f74 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
@@ -35,6 +35,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveIntervalDayTimeObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveIntervalYearMonthObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveTimestampObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveVarcharObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableLongObjectInspector;
@@ -123,6 +124,9 @@ private static Converter getConverter(PrimitiveObjectInspector inputOI,
return new PrimitiveObjectInspectorConverter.TimestampConverter(
inputOI,
(SettableTimestampObjectInspector) outputOI);
+ case TIMESTAMPTZ:
+ return new PrimitiveObjectInspectorConverter.TimestampTZConverter(
+ inputOI, (SettableHiveTimestampObjectInspector) outputOI);
case INTERVAL_YEAR_MONTH:
return new PrimitiveObjectInspectorConverter.HiveIntervalYearMonthConverter(
inputOI,
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
index 1ac72c6..df38b67 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
@@ -27,6 +27,9 @@
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveTimestampObjectorInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveTimestampObjectInspector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.serde.serdeConstants;
@@ -412,6 +415,10 @@ public static Object copyToStandardObject(
result = loi.getPrimitiveJavaObject(o);
if (loi.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMP) {
result = PrimitiveObjectInspectorFactory.javaTimestampObjectInspector.copyObject(result);
+ } else if (loi.getPrimitiveCategory() ==
+ PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMPTZ) {
+ result = PrimitiveObjectInspectorFactory.javaHiveTimestampObjectInspector.
+ copyObject(result);
}
break;
case WRITABLE:
@@ -689,6 +696,10 @@ public static int hashCode(Object o, ObjectInspector objIns) {
TimestampWritable t = ((TimestampObjectInspector) poi)
.getPrimitiveWritableObject(o);
return t.hashCode();
+ case TIMESTAMPTZ:
+ HiveTimestampWritable ht = ((HiveTimestampObjectorInspector) poi)
+ .getPrimitiveWritableObject(o);
+ return ht.hashCode();
case INTERVAL_YEAR_MONTH:
HiveIntervalYearMonthWritable intervalYearMonth = ((HiveIntervalYearMonthObjectInspector) poi)
.getPrimitiveWritableObject(o);
@@ -948,6 +959,13 @@ public static int compare(Object o1, ObjectInspector oi1, Object o2,
.getPrimitiveWritableObject(o2);
return t1.compareTo(t2);
}
+ case TIMESTAMPTZ: {
+ HiveTimestampWritable hts1 = ((HiveTimestampObjectorInspector) poi1).
+ getPrimitiveWritableObject(o1);
+ HiveTimestampWritable hts2 = ((HiveTimestampObjectorInspector) poi2).
+ getPrimitiveWritableObject(o2);
+ return hts1.compareTo(hts2);
+ }
case INTERVAL_YEAR_MONTH: {
HiveIntervalYearMonthWritable i1 = ((HiveIntervalYearMonthObjectInspector) poi1)
.getPrimitiveWritableObject(o1);
@@ -1315,6 +1333,8 @@ private static boolean isInstanceOfSettablePrimitiveOI(PrimitiveObjectInspector
return oi instanceof SettableDateObjectInspector;
case TIMESTAMP:
return oi instanceof SettableTimestampObjectInspector;
+ case TIMESTAMPTZ:
+ return oi instanceof SettableHiveTimestampObjectInspector;
case INTERVAL_YEAR_MONTH:
return oi instanceof SettableHiveIntervalYearMonthObjectInspector;
case INTERVAL_DAY_TIME:
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
index 70633f3..e2c15ff 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
@@ -31,8 +31,8 @@
*/
public static enum PrimitiveCategory {
VOID, BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING,
- DATE, TIMESTAMP, BINARY, DECIMAL, VARCHAR, CHAR, INTERVAL_YEAR_MONTH, INTERVAL_DAY_TIME,
- UNKNOWN
+ DATE, TIMESTAMP, TIMESTAMPTZ, BINARY, DECIMAL, VARCHAR, CHAR, INTERVAL_YEAR_MONTH,
+ INTERVAL_DAY_TIME, UNKNOWN
};
public PrimitiveTypeInfo getTypeInfo();
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveTimestampObjectorInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveTimestampObjectorInspector.java
new file mode 100644
index 0000000..96f73aa
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveTimestampObjectorInspector.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import org.apache.hadoop.hive.common.type.HiveTimestamp;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+
+/**
+ * For HiveTimestamp.
+ */
+public interface HiveTimestampObjectorInspector extends PrimitiveObjectInspector {
+
+ HiveTimestampWritable getPrimitiveWritableObject(Object o);
+
+ HiveTimestamp getPrimitiveJavaObject(Object o);
+
+}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveTimestampObjectInspector.java
new file mode 100644
index 0000000..12b7256
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveTimestampObjectInspector.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import org.apache.hadoop.hive.common.type.HiveTimestamp;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableBase;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+import java.sql.Timestamp;
+
+public class JavaHiveTimestampObjectInspector
+ extends AbstractPrimitiveJavaObjectInspector implements SettableHiveTimestampObjectInspector {
+
+ protected JavaHiveTimestampObjectInspector() {
+ super(TypeInfoFactory.timestamptzTypeInfo);
+ }
+
+ @Override
+ public Object set(Object o, byte[] bytes, int offset) {
+ TimestampWritableBase.setTimestamp((Timestamp) o, bytes, offset);
+ return o;
+ }
+
+ @Override
+ public Object set(Object o, HiveTimestamp t) {
+ if (t == null) {
+ return null;
+ }
+ HiveTimestamp hts = (HiveTimestamp) o;
+ hts.setTime(t.getTime());
+ hts.setNanos(t.getNanos());
+ hts.setOffsetInMin(t.getOffsetInMin());
+ return hts;
+ }
+
+ @Override
+ public Object set(Object o, HiveTimestampWritable t) {
+ if (t == null) {
+ return null;
+ }
+ HiveTimestamp hts = (HiveTimestamp) o;
+ HiveTimestamp source = t.getTimestamp();
+ hts.setTime(source.getTime());
+ hts.setNanos(source.getNanos());
+ hts.setOffsetInMin(source.getOffsetInMin());
+ return hts;
+ }
+
+ @Override
+ public Object create(byte[] bytes, int offset) {
+ return HiveTimestampWritable.createHiveTimestamp(bytes, offset);
+ }
+
+ @Override
+ public Object create(HiveTimestamp t) {
+ return copyObject(t);
+ }
+
+ @Override
+ public HiveTimestampWritable getPrimitiveWritableObject(Object o) {
+ return o == null ? null : new HiveTimestampWritable((HiveTimestamp) o);
+ }
+
+ @Override
+ public HiveTimestamp getPrimitiveJavaObject(Object o) {
+ return o == null ? null : (HiveTimestamp) o;
+ }
+
+ @Override
+ public Object copyObject(Object o) {
+ if (o == null) {
+ return null;
+ }
+ HiveTimestamp source = (HiveTimestamp) o;
+ HiveTimestamp copy = new HiveTimestamp(source.getTime(), source.getOffsetInMin());
+ copy.setNanos(source.getNanos());
+ return copy;
+ }
+}
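Illustrating the settable contract implemented above (a sketch; javaHiveTimestampObjectInspector is the singleton registered later in this patch):

    HiveTimestamp target = new HiveTimestamp(0L, 0);
    HiveTimestamp source = new HiveTimestamp(1451824594012L, 480); // epoch millis plus offset in minutes
    javaHiveTimestampObjectInspector.set(target, source);          // copies time, nanos and offset in place
    HiveTimestampWritable w =
        javaHiveTimestampObjectInspector.getPrimitiveWritableObject(target);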
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
index 509189e..c49531e 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
@@ -20,6 +20,7 @@
import java.sql.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableBase;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
public class JavaTimestampObjectInspector
@@ -63,7 +64,7 @@ public Object set(Object o, Timestamp value) {
}
public Object set(Object o, byte[] bytes, int offset) {
- TimestampWritable.setTimestamp((Timestamp) o, bytes, offset);
+ TimestampWritableBase.setTimestamp((Timestamp) o, bytes, offset);
return o;
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
index e08ad43..ffde4be 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
@@ -25,6 +25,7 @@
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveTimestamp;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
@@ -292,6 +293,27 @@ public Object convert(Object input) {
}
}
+ public static class TimestampTZConverter implements Converter {
+ final PrimitiveObjectInspector inputOI;
+ final SettableHiveTimestampObjectInspector outputOI;
+ final Object r;
+
+ public TimestampTZConverter(PrimitiveObjectInspector inputOI,
+ SettableHiveTimestampObjectInspector outputOI) {
+ this.inputOI = inputOI;
+ this.outputOI = outputOI;
+ r = outputOI.create(new HiveTimestamp(0, 0));
+ }
+
+ @Override
+ public Object convert(Object input) {
+ if (input == null) {
+ return null;
+ }
+ return outputOI.set(r, PrimitiveObjectInspectorUtils.getHiveTimestamp(input, inputOI));
+ }
+ }
+
public static class HiveIntervalYearMonthConverter implements Converter {
PrimitiveObjectInspector inputOI;
SettableHiveIntervalYearMonthObjectInspector outputOI;
@@ -466,6 +488,10 @@ public Text convert(Object input) {
t.set(((TimestampObjectInspector) inputOI)
.getPrimitiveWritableObject(input).toString());
return t;
+ case TIMESTAMPTZ:
+ t.set(((HiveTimestampObjectorInspector) inputOI)
+ .getPrimitiveWritableObject(input).toString());
+ return t;
case INTERVAL_YEAR_MONTH:
t.set(((HiveIntervalYearMonthObjectInspector) inputOI)
.getPrimitiveWritableObject(input).toString());
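A usage sketch for TimestampTZConverter, assuming a Java string input OI (the STRING case of getHiveTimestamp below does the parsing):

    ObjectInspectorConverters.Converter c =
        new PrimitiveObjectInspectorConverter.TimestampTZConverter(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaHiveTimestampObjectInspector);
    Object hts = c.convert("2017-01-01 00:00:00 UTC");

Note that convert() reuses the single backing object r, so callers that keep results across calls must copy them.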
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
index 2ed0843..064fbd1 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
@@ -30,6 +30,7 @@
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -87,6 +88,8 @@
new WritableDateObjectInspector();
public static final WritableTimestampObjectInspector writableTimestampObjectInspector =
new WritableTimestampObjectInspector();
+ public static final WritableHiveTimestampObjectInspector writableHiveTimestampObjectInspector =
+ new WritableHiveTimestampObjectInspector();
public static final WritableHiveIntervalYearMonthObjectInspector writableHiveIntervalYearMonthObjectInspector =
new WritableHiveIntervalYearMonthObjectInspector();
public static final WritableHiveIntervalDayTimeObjectInspector writableHiveIntervalDayTimeObjectInspector =
@@ -124,6 +127,8 @@
writableDateObjectInspector);
cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME),
writableTimestampObjectInspector);
+ cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TIMESTAMPTZ_TYPE_NAME),
+ writableHiveTimestampObjectInspector);
cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME),
writableHiveIntervalYearMonthObjectInspector);
cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME),
@@ -149,6 +154,7 @@
primitiveCategoryToWritableOI.put(PrimitiveCategory.VOID, writableVoidObjectInspector);
primitiveCategoryToWritableOI.put(PrimitiveCategory.DATE, writableDateObjectInspector);
primitiveCategoryToWritableOI.put(PrimitiveCategory.TIMESTAMP, writableTimestampObjectInspector);
+ primitiveCategoryToWritableOI.put(PrimitiveCategory.TIMESTAMPTZ, writableHiveTimestampObjectInspector);
primitiveCategoryToWritableOI.put(PrimitiveCategory.INTERVAL_YEAR_MONTH, writableHiveIntervalYearMonthObjectInspector);
primitiveCategoryToWritableOI.put(PrimitiveCategory.INTERVAL_DAY_TIME, writableHiveIntervalDayTimeObjectInspector);
primitiveCategoryToWritableOI.put(PrimitiveCategory.BINARY, writableBinaryObjectInspector);
@@ -181,6 +187,8 @@
new JavaDateObjectInspector();
public static final JavaTimestampObjectInspector javaTimestampObjectInspector =
new JavaTimestampObjectInspector();
+ public static final JavaHiveTimestampObjectInspector javaHiveTimestampObjectInspector =
+ new JavaHiveTimestampObjectInspector();
public static final JavaHiveIntervalYearMonthObjectInspector javaHiveIntervalYearMonthObjectInspector =
new JavaHiveIntervalYearMonthObjectInspector();
public static final JavaHiveIntervalDayTimeObjectInspector javaHiveIntervalDayTimeObjectInspector =
@@ -218,6 +226,8 @@
javaDateObjectInspector);
cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME),
javaTimestampObjectInspector);
+ cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.timestamptzTypeInfo,
+ javaHiveTimestampObjectInspector);
cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME),
javaHiveIntervalYearMonthObjectInspector);
cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME),
@@ -243,6 +253,7 @@
primitiveCategoryToJavaOI.put(PrimitiveCategory.VOID, javaVoidObjectInspector);
primitiveCategoryToJavaOI.put(PrimitiveCategory.DATE, javaDateObjectInspector);
primitiveCategoryToJavaOI.put(PrimitiveCategory.TIMESTAMP, javaTimestampObjectInspector);
+ primitiveCategoryToJavaOI.put(PrimitiveCategory.TIMESTAMPTZ, javaHiveTimestampObjectInspector);
primitiveCategoryToJavaOI.put(PrimitiveCategory.INTERVAL_YEAR_MONTH, javaHiveIntervalYearMonthObjectInspector);
primitiveCategoryToJavaOI.put(PrimitiveCategory.INTERVAL_DAY_TIME, javaHiveIntervalDayTimeObjectInspector);
primitiveCategoryToJavaOI.put(PrimitiveCategory.BINARY, javaByteArrayObjectInspector);
@@ -336,6 +347,8 @@ public static ConstantObjectInspector getPrimitiveWritableConstantObjectInspecto
return new WritableConstantDateObjectInspector((DateWritable)value);
case TIMESTAMP:
return new WritableConstantTimestampObjectInspector((TimestampWritable)value);
+ case TIMESTAMPTZ:
+ return new WritableConstantHiveTimestampObjectInspector((HiveTimestampWritable) value);
case INTERVAL_YEAR_MONTH:
return new WritableConstantHiveIntervalYearMonthObjectInspector((HiveIntervalYearMonthWritable) value);
case INTERVAL_DAY_TIME:
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
index 51b529e..3b248d5 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
@@ -27,7 +27,9 @@
import java.util.HashMap;
import java.util.Map;
+import org.apache.hadoop.hive.common.type.HiveTimestamp;
import org.apache.hadoop.hive.ql.util.TimestampUtils;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.common.type.HiveChar;
@@ -226,6 +228,9 @@ static void registerType(PrimitiveTypeEntry t) {
public static final PrimitiveTypeEntry timestampTypeEntry = new PrimitiveTypeEntry(
PrimitiveCategory.TIMESTAMP, serdeConstants.TIMESTAMP_TYPE_NAME, null,
Timestamp.class, TimestampWritable.class);
+ public static final PrimitiveTypeEntry timestampTZTypeEntry = new PrimitiveTypeEntry(
+ PrimitiveCategory.TIMESTAMPTZ, serdeConstants.TIMESTAMPTZ_TYPE_NAME, null,
+ HiveTimestamp.class, HiveTimestampWritable.class);
public static final PrimitiveTypeEntry intervalYearMonthTypeEntry = new PrimitiveTypeEntry(
PrimitiveCategory.INTERVAL_YEAR_MONTH, serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME, null,
HiveIntervalYearMonth.class, HiveIntervalYearMonthWritable.class);
@@ -261,6 +266,7 @@ static void registerType(PrimitiveTypeEntry t) {
registerType(shortTypeEntry);
registerType(dateTypeEntry);
registerType(timestampTypeEntry);
+ registerType(timestampTZTypeEntry);
registerType(intervalYearMonthTypeEntry);
registerType(intervalDayTimeTypeEntry);
registerType(decimalTypeEntry);
@@ -439,6 +445,10 @@ public static boolean comparePrimitiveObjects(Object o1,
return ((TimestampObjectInspector) oi1).getPrimitiveWritableObject(o1)
.equals(((TimestampObjectInspector) oi2).getPrimitiveWritableObject(o2));
}
+ case TIMESTAMPTZ: {
+ return ((HiveTimestampObjectorInspector) oi1).getPrimitiveWritableObject(o1).equals(
+ ((HiveTimestampObjectorInspector) oi2).getPrimitiveWritableObject(o2));
+ }
case INTERVAL_YEAR_MONTH: {
return ((HiveIntervalYearMonthObjectInspector) oi1).getPrimitiveWritableObject(o1)
.equals(((HiveIntervalYearMonthObjectInspector) oi2).getPrimitiveWritableObject(o2));
@@ -461,39 +471,6 @@ public static boolean comparePrimitiveObjects(Object o1,
}
/**
- * Convert a primitive object to double.
- */
- public static double convertPrimitiveToDouble(Object o, PrimitiveObjectInspector oi) {
- switch (oi.getPrimitiveCategory()) {
- case BOOLEAN:
- return ((BooleanObjectInspector) oi).get(o) ? 1 : 0;
- case BYTE:
- return ((ByteObjectInspector) oi).get(o);
- case SHORT:
- return ((ShortObjectInspector) oi).get(o);
- case INT:
- return ((IntObjectInspector) oi).get(o);
- case LONG:
- return ((LongObjectInspector) oi).get(o);
- case FLOAT:
- return ((FloatObjectInspector) oi).get(o);
- case DOUBLE:
- return ((DoubleObjectInspector) oi).get(o);
- case STRING:
- return Double.valueOf(((StringObjectInspector) oi).getPrimitiveJavaObject(o));
- case TIMESTAMP:
- return ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o)
- .getDouble();
- case DECIMAL:
- // TODO: lossy conversion!
- return ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o).doubleValue();
- case DATE: // unsupported conversion
- default:
- throw new NumberFormatException();
- }
- }
-
- /**
* Compare 2 Primitive Objects with their Object Inspector, conversions
* allowed. Note that NULL does not equal to NULL according to SQL standard.
*/
@@ -509,8 +486,7 @@ public static boolean comparePrimitiveObjectsWithConversion(Object o1,
// If not equal, convert all to double and compare
try {
- return convertPrimitiveToDouble(o1, oi1) == convertPrimitiveToDouble(o2,
- oi2);
+ return getDouble(o1, oi1) == getDouble(o2, oi2);
} catch (NumberFormatException e) {
return false;
}
@@ -562,6 +538,10 @@ public static boolean getBoolean(Object o, PrimitiveObjectInspector oi) {
result = (((TimestampObjectInspector) oi)
.getPrimitiveWritableObject(o).getSeconds() != 0);
break;
+ case TIMESTAMPTZ:
+ result = (((HiveTimestampObjectorInspector) oi)
+ .getPrimitiveWritableObject(o).getSeconds() != 0);
+ break;
case DECIMAL:
result = HiveDecimal.ZERO.compareTo(
((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o)) != 0;
@@ -652,6 +632,10 @@ public static int getInt(Object o, PrimitiveObjectInspector oi) {
result = (int) (((TimestampObjectInspector) oi)
.getPrimitiveWritableObject(o).getSeconds());
break;
+ case TIMESTAMPTZ:
+ result = (int) ((HiveTimestampObjectorInspector) oi)
+ .getPrimitiveWritableObject(o).getSeconds();
+ break;
case DECIMAL:
result = ((HiveDecimalObjectInspector) oi)
.getPrimitiveJavaObject(o).intValue(); // TODO: lossy conversion!
@@ -716,6 +700,10 @@ public static long getLong(Object o, PrimitiveObjectInspector oi) {
result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o)
.getSeconds();
break;
+ case TIMESTAMPTZ:
+ result = ((HiveTimestampObjectorInspector) oi).getPrimitiveWritableObject(o)
+ .getSeconds();
+ break;
case DECIMAL:
result = ((HiveDecimalObjectInspector) oi)
.getPrimitiveJavaObject(o).longValue(); // TODO: lossy conversion!
@@ -772,6 +760,9 @@ public static double getDouble(Object o, PrimitiveObjectInspector oi) {
case TIMESTAMP:
result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getDouble();
break;
+ case TIMESTAMPTZ:
+ result = ((HiveTimestampObjectorInspector) oi).getPrimitiveWritableObject(o).getDouble();
+ break;
case DECIMAL:
result = ((HiveDecimalObjectInspector) oi)
.getPrimitiveJavaObject(o).doubleValue();
@@ -858,6 +849,9 @@ public static String getString(Object o, PrimitiveObjectInspector oi) {
case TIMESTAMP:
result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).toString();
break;
+ case TIMESTAMPTZ:
+ result = ((HiveTimestampObjectorInspector) oi).getPrimitiveWritableObject(o).toString();
+ break;
case INTERVAL_YEAR_MONTH:
result = ((HiveIntervalYearMonthObjectInspector) oi).getPrimitiveWritableObject(o).toString();
break;
@@ -999,6 +993,10 @@ public static HiveDecimal getHiveDecimal(Object o, PrimitiveObjectInspector oi)
.getDouble();
result = HiveDecimal.create(ts.toString());
break;
+ case TIMESTAMPTZ:
+ Double hts = ((HiveTimestampObjectorInspector) oi).getPrimitiveWritableObject(o).getDouble();
+ result = HiveDecimal.create(hts.toString());
+ break;
case DECIMAL:
result = ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o);
break;
@@ -1010,6 +1008,32 @@ public static HiveDecimal getHiveDecimal(Object o, PrimitiveObjectInspector oi)
return result;
}
+ public static HiveTimestamp getHiveTimestamp(Object o, PrimitiveObjectInspector oi) {
+ if (o == null) {
+ return null;
+ }
+
+ switch (oi.getPrimitiveCategory()) {
+ case STRING: {
+ StringObjectInspector soi = (StringObjectInspector) oi;
+ String s = soi.getPrimitiveJavaObject(o).trim();
+ return TimestampUtils.getHiveTimestampOrNull(s);
+ }
+ case CHAR:
+ case VARCHAR: {
+ String s = getString(o, oi).trim();
+ return TimestampUtils.getHiveTimestampOrNull(s);
+ }
+ case TIMESTAMPTZ: {
+ return ((HiveTimestampObjectorInspector) oi).getPrimitiveWritableObject(o).getTimestamp();
+ }
+ default: {
+ throw new RuntimeException("Cannot convert to TIMESTAMPTZ from: "
+ + oi.getTypeName());
+ }
+ }
+ }
+
public static Date getDate(Object o, PrimitiveObjectInspector oi) {
if (o == null) {
return null;
@@ -1046,6 +1070,10 @@ public static Date getDate(Object o, PrimitiveObjectInspector oi) {
result = DateWritable.timeToDate(
((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getSeconds());
break;
+ case TIMESTAMPTZ:
+ result = DateWritable.timeToDate(
+ ((HiveTimestampObjectorInspector) oi).getPrimitiveWritableObject(o).getSeconds());
+ break;
default:
throw new RuntimeException("Cannot convert to Date from: "
+ oi.getTypeName());
@@ -1115,6 +1143,12 @@ public static Timestamp getTimestamp(Object o, PrimitiveObjectInspector inputOI,
case TIMESTAMP:
result = ((TimestampObjectInspector) inputOI).getPrimitiveWritableObject(o).getTimestamp();
break;
+ case TIMESTAMPTZ:
+ HiveTimestamp hts = ((HiveTimestampObjectorInspector) inputOI).
+ getPrimitiveWritableObject(o).getTimestamp();
+ result = new Timestamp(hts.getTime());
+ result.setNanos(hts.getNanos());
+ break;
default:
throw new RuntimeException("Hive 2 Internal error: unknown type: "
+ inputOI.getTypeName());
@@ -1249,6 +1282,7 @@ public static PrimitiveGrouping getPrimitiveGrouping(PrimitiveCategory primitive
return PrimitiveGrouping.BOOLEAN_GROUP;
case TIMESTAMP:
case DATE:
+ case TIMESTAMPTZ:
return PrimitiveGrouping.DATE_GROUP;
case INTERVAL_YEAR_MONTH:
case INTERVAL_DAY_TIME:
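One behavior worth spelling out from the getTimestamp hunk above: narrowing TIMESTAMPTZ to java.sql.Timestamp keeps the instant and nanos but drops the zone offset, since plain Timestamp cannot carry it. A sketch:

    HiveTimestamp hts = new HiveTimestamp(1451824594012L, 480); // GMT+08:00 expressed in minutes
    Timestamp t = new Timestamp(hts.getTime());
    t.setNanos(hts.getNanos()); // same instant; the offset has no representation here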
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableHiveTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableHiveTimestampObjectInspector.java
new file mode 100644
index 0000000..8a8d56d
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableHiveTimestampObjectInspector.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import org.apache.hadoop.hive.common.type.HiveTimestamp;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
+
+public interface SettableHiveTimestampObjectInspector extends HiveTimestampObjectorInspector {
+
+ Object set(Object o, byte[] bytes, int offset);
+
+ Object set(Object o, HiveTimestamp t);
+
+ Object set(Object o, HiveTimestampWritable t);
+
+ Object create(byte[] bytes, int offset);
+
+ Object create(HiveTimestamp t);
+}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveTimestampObjectInspector.java
new file mode 100644
index 0000000..108b788
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveTimestampObjectInspector.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+
+public class WritableConstantHiveTimestampObjectInspector extends
+ WritableHiveTimestampObjectInspector implements ConstantObjectInspector {
+
+ private HiveTimestampWritable value;
+
+ public WritableConstantHiveTimestampObjectInspector(HiveTimestampWritable value) {
+ this.value = value;
+ }
+
+ @Override
+ public HiveTimestampWritable getWritableConstantValue() {
+ return value;
+ }
+}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveTimestampObjectInspector.java
new file mode 100644
index 0000000..2411c81
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveTimestampObjectInspector.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import org.apache.hadoop.hive.common.type.HiveTimestamp;
+import org.apache.hadoop.hive.serde2.io.HiveTimestampWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+public class WritableHiveTimestampObjectInspector extends
+ AbstractPrimitiveWritableObjectInspector implements
+ SettableHiveTimestampObjectInspector {
+
+ public WritableHiveTimestampObjectInspector() {
+ super(TypeInfoFactory.timestamptzTypeInfo);
+ }
+
+ @Override
+ public Object set(Object o, byte[] bytes, int offset) {
+ ((HiveTimestampWritable) o).set(bytes, offset);
+ return o;
+ }
+
+ @Override
+ public Object set(Object o, HiveTimestamp t) {
+ if (t == null) {
+ return null;
+ }
+ ((HiveTimestampWritable) o).set(t);
+ return o;
+ }
+
+ @Override
+ public Object set(Object o, HiveTimestampWritable t) {
+ if (t == null) {
+ return null;
+ }
+ ((HiveTimestampWritable) o).set(t);
+ return o;
+ }
+
+ @Override
+ public Object create(byte[] bytes, int offset) {
+ return new HiveTimestampWritable(bytes, offset);
+ }
+
+ @Override
+ public Object create(HiveTimestamp t) {
+ return new HiveTimestampWritable(t);
+ }
+
+ @Override
+ public HiveTimestamp getPrimitiveJavaObject(Object o) {
+ return o == null ? null : ((HiveTimestampWritable) o).getTimestamp();
+ }
+
+ @Override
+ public Object copyObject(Object o) {
+ return o == null ? null : new HiveTimestampWritable((HiveTimestampWritable) o);
+ }
+
+ @Override
+ public HiveTimestampWritable getPrimitiveWritableObject(Object o) {
+ return o == null ? null : (HiveTimestampWritable) o;
+ }
+}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/Type.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/Type.java
index 0ad8c02..7b9634c 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/Type.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/Type.java
@@ -70,6 +70,9 @@
TIMESTAMP_TYPE("TIMESTAMP",
java.sql.Types.TIMESTAMP,
TTypeId.TIMESTAMP_TYPE),
+ TIMESTAMPTZ_TYPE("TIMESTAMPTZ",
+ java.sql.Types.OTHER,
+ TTypeId.TIMESTAMPTZ_TYPE),
INTERVAL_YEAR_MONTH_TYPE("INTERVAL_YEAR_MONTH",
java.sql.Types.OTHER,
TTypeId.INTERVAL_YEAR_MONTH_TYPE),
@@ -225,6 +228,9 @@ public static Type getType(TypeInfo typeInfo) {
case TIMESTAMP: {
return Type.TIMESTAMP_TYPE;
}
+ case TIMESTAMPTZ: {
+ return Type.TIMESTAMPTZ_TYPE;
+ }
case INTERVAL_YEAR_MONTH: {
return Type.INTERVAL_YEAR_MONTH_TYPE;
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
index 43c4819..4982ac8 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
@@ -55,6 +55,7 @@ private TypeInfoFactory() {
public static final PrimitiveTypeInfo shortTypeInfo = new PrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME);
public static final PrimitiveTypeInfo dateTypeInfo = new PrimitiveTypeInfo(serdeConstants.DATE_TYPE_NAME);
public static final PrimitiveTypeInfo timestampTypeInfo = new PrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME);
+ public static final PrimitiveTypeInfo timestamptzTypeInfo = new PrimitiveTypeInfo(serdeConstants.TIMESTAMPTZ_TYPE_NAME);
public static final PrimitiveTypeInfo intervalYearMonthTypeInfo = new PrimitiveTypeInfo(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
public static final PrimitiveTypeInfo intervalDayTimeTypeInfo = new PrimitiveTypeInfo(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
public static final PrimitiveTypeInfo binaryTypeInfo = new PrimitiveTypeInfo(serdeConstants.BINARY_TYPE_NAME);
@@ -85,6 +86,7 @@ private TypeInfoFactory() {
cachedPrimitiveTypeInfo.put(serdeConstants.SMALLINT_TYPE_NAME, shortTypeInfo);
cachedPrimitiveTypeInfo.put(serdeConstants.DATE_TYPE_NAME, dateTypeInfo);
cachedPrimitiveTypeInfo.put(serdeConstants.TIMESTAMP_TYPE_NAME, timestampTypeInfo);
+ cachedPrimitiveTypeInfo.put(serdeConstants.TIMESTAMPTZ_TYPE_NAME, timestamptzTypeInfo);
cachedPrimitiveTypeInfo.put(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME, intervalYearMonthTypeInfo);
cachedPrimitiveTypeInfo.put(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME, intervalDayTimeTypeInfo);
cachedPrimitiveTypeInfo.put(serdeConstants.BINARY_TYPE_NAME, binaryTypeInfo);
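With the cache entry above, the new type resolves by name like any other primitive; a quick sketch:

    PrimitiveTypeInfo ti =
        TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TIMESTAMPTZ_TYPE_NAME);
    assert ti == TypeInfoFactory.timestamptzTypeInfo; // cached singleton, not a new instance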
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
index 3c483cc..d7494cd 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
@@ -17,9 +17,6 @@
*/
package org.apache.hadoop.hive.serde2.io;
-import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
-
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
@@ -35,13 +32,21 @@
import java.util.Random;
import java.util.TimeZone;
+import com.google.code.tempusfugit.concurrency.ConcurrentRule;
+import com.google.code.tempusfugit.concurrency.RepeatingRule;
+import com.google.code.tempusfugit.concurrency.annotations.Concurrent;
+import com.google.code.tempusfugit.concurrency.annotations.Repeating;
import org.apache.hadoop.hive.ql.util.TimestampUtils;
-import org.junit.*;
-import static org.junit.Assert.*;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
public class TestTimestampWritable {
@@ -494,6 +499,22 @@ public void testBinarySortable() {
}
@Test
+ public void test2ndMSBOfDecimal() {
+ // The decimal (nanos) part currently ranges over [-1000000000, 999999999], so its
+ // second MSB is free to indicate whether a timezone offset exists
+ int decimal = -1000000000;
+ final int mask = 1 << 30;
+ while (decimal < 0) {
+ assertTrue((decimal & mask) != 0);
+ decimal++;
+ }
+ while (decimal <= 999999999) {
+ assertTrue((decimal & mask) == 0);
+ decimal++;
+ }
+ }
+
+ @Test
public void testSetTimestamp() {
// one VInt without nanos
verifySetTimestamp(1000);
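The invariant test2ndMSBOfDecimal exercises, in plain Java: the whole range [-1000000000, 999999999] fits inside (-2^30, 2^30), so in two's complement bit 30 is set exactly for the negative values, leaving it free as a has-offset flag:

    final int MASK = 1 << 30;                     // second most significant bit
    boolean neg = (-1000000000 & MASK) != 0;      // true across [-1000000000, -1]
    boolean pos = (999999999 & MASK) != 0;        // false across [0, 999999999]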
diff --git a/service-rpc/if/TCLIService.thrift b/service-rpc/if/TCLIService.thrift
index a4fa7b0..613d4b2 100644
--- a/service-rpc/if/TCLIService.thrift
+++ b/service-rpc/if/TCLIService.thrift
@@ -63,6 +63,9 @@ enum TProtocolVersion {
// V9 adds support for serializing ResultSets in SerDe
HIVE_CLI_SERVICE_PROTOCOL_V9
+
+ // V10 adds timestamptz type
+ HIVE_CLI_SERVICE_PROTOCOL_V10
}
enum TTypeId {
@@ -87,7 +90,8 @@ enum TTypeId {
VARCHAR_TYPE,
CHAR_TYPE,
INTERVAL_YEAR_MONTH_TYPE,
- INTERVAL_DAY_TIME_TYPE
+ INTERVAL_DAY_TIME_TYPE,
+ TIMESTAMPTZ_TYPE
}
const set<TTypeId> PRIMITIVE_TYPES = [
@@ -107,7 +111,8 @@ const set PRIMITIVE_TYPES = [
TTypeId.VARCHAR_TYPE,
TTypeId.CHAR_TYPE,
TTypeId.INTERVAL_YEAR_MONTH_TYPE,
- TTypeId.INTERVAL_DAY_TIME_TYPE
+ TTypeId.INTERVAL_DAY_TIME_TYPE,
+ TTypeId.TIMESTAMPTZ_TYPE
]
const set<TTypeId> COMPLEX_TYPES = [
diff --git a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_constants.cpp b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
index 991cb2e..201ec0d 100644
--- a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
+++ b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
@@ -28,6 +28,7 @@ TCLIServiceConstants::TCLIServiceConstants() {
PRIMITIVE_TYPES.insert((TTypeId::type)19);
PRIMITIVE_TYPES.insert((TTypeId::type)20);
PRIMITIVE_TYPES.insert((TTypeId::type)21);
+ PRIMITIVE_TYPES.insert((TTypeId::type)22);
COMPLEX_TYPES.insert((TTypeId::type)10);
COMPLEX_TYPES.insert((TTypeId::type)11);
diff --git a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.cpp b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.cpp
index 2f460e8..aaf9fc5 100644
--- a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.cpp
+++ b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.cpp
@@ -22,7 +22,8 @@ int _kTProtocolVersionValues[] = {
TProtocolVersion::HIVE_CLI_SERVICE_PROTOCOL_V6,
TProtocolVersion::HIVE_CLI_SERVICE_PROTOCOL_V7,
TProtocolVersion::HIVE_CLI_SERVICE_PROTOCOL_V8,
- TProtocolVersion::HIVE_CLI_SERVICE_PROTOCOL_V9
+ TProtocolVersion::HIVE_CLI_SERVICE_PROTOCOL_V9,
+ TProtocolVersion::HIVE_CLI_SERVICE_PROTOCOL_V10
};
const char* _kTProtocolVersionNames[] = {
"HIVE_CLI_SERVICE_PROTOCOL_V1",
@@ -33,9 +34,10 @@ const char* _kTProtocolVersionNames[] = {
"HIVE_CLI_SERVICE_PROTOCOL_V6",
"HIVE_CLI_SERVICE_PROTOCOL_V7",
"HIVE_CLI_SERVICE_PROTOCOL_V8",
- "HIVE_CLI_SERVICE_PROTOCOL_V9"
+ "HIVE_CLI_SERVICE_PROTOCOL_V9",
+ "HIVE_CLI_SERVICE_PROTOCOL_V10"
};
-const std::map<int, const char*> _TProtocolVersion_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(9, _kTProtocolVersionValues, _kTProtocolVersionNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+const std::map<int, const char*> _TProtocolVersion_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(10, _kTProtocolVersionValues, _kTProtocolVersionNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
int _kTTypeIdValues[] = {
TTypeId::BOOLEAN_TYPE,
@@ -59,7 +61,8 @@ int _kTTypeIdValues[] = {
TTypeId::VARCHAR_TYPE,
TTypeId::CHAR_TYPE,
TTypeId::INTERVAL_YEAR_MONTH_TYPE,
- TTypeId::INTERVAL_DAY_TIME_TYPE
+ TTypeId::INTERVAL_DAY_TIME_TYPE,
+ TTypeId::TIMESTAMPTZ_TYPE
};
const char* _kTTypeIdNames[] = {
"BOOLEAN_TYPE",
@@ -83,9 +86,10 @@ const char* _kTTypeIdNames[] = {
"VARCHAR_TYPE",
"CHAR_TYPE",
"INTERVAL_YEAR_MONTH_TYPE",
- "INTERVAL_DAY_TIME_TYPE"
+ "INTERVAL_DAY_TIME_TYPE",
+ "TIMESTAMPTZ_TYPE"
};
-const std::map<int, const char*> _TTypeId_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(22, _kTTypeIdValues, _kTTypeIdNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+const std::map<int, const char*> _TTypeId_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(23, _kTTypeIdValues, _kTTypeIdNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
int _kTStatusCodeValues[] = {
TStatusCode::SUCCESS_STATUS,
diff --git a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.h b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.h
index b249544..5604799 100644
--- a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.h
+++ b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.h
@@ -29,7 +29,8 @@ struct TProtocolVersion {
HIVE_CLI_SERVICE_PROTOCOL_V6 = 5,
HIVE_CLI_SERVICE_PROTOCOL_V7 = 6,
HIVE_CLI_SERVICE_PROTOCOL_V8 = 7,
- HIVE_CLI_SERVICE_PROTOCOL_V9 = 8
+ HIVE_CLI_SERVICE_PROTOCOL_V9 = 8,
+ HIVE_CLI_SERVICE_PROTOCOL_V10 = 9
};
};
@@ -58,7 +59,8 @@ struct TTypeId {
VARCHAR_TYPE = 18,
CHAR_TYPE = 19,
INTERVAL_YEAR_MONTH_TYPE = 20,
- INTERVAL_DAY_TIME_TYPE = 21
+ INTERVAL_DAY_TIME_TYPE = 21,
+ TIMESTAMPTZ_TYPE = 22
};
};
diff --git a/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TCLIServiceConstants.java b/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TCLIServiceConstants.java
index 930bed7..762d46a 100644
--- a/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TCLIServiceConstants.java
+++ b/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TCLIServiceConstants.java
@@ -55,6 +55,7 @@
PRIMITIVE_TYPES.add(org.apache.hive.service.rpc.thrift.TTypeId.CHAR_TYPE);
PRIMITIVE_TYPES.add(org.apache.hive.service.rpc.thrift.TTypeId.INTERVAL_YEAR_MONTH_TYPE);
PRIMITIVE_TYPES.add(org.apache.hive.service.rpc.thrift.TTypeId.INTERVAL_DAY_TIME_TYPE);
+ PRIMITIVE_TYPES.add(org.apache.hive.service.rpc.thrift.TTypeId.TIMESTAMPTZ_TYPE);
}
public static final Set<TTypeId> COMPLEX_TYPES = new HashSet<TTypeId>();
diff --git a/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TProtocolVersion.java b/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TProtocolVersion.java
index bce2a0c..18a7825 100644
--- a/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TProtocolVersion.java
+++ b/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TProtocolVersion.java
@@ -20,7 +20,8 @@
HIVE_CLI_SERVICE_PROTOCOL_V6(5),
HIVE_CLI_SERVICE_PROTOCOL_V7(6),
HIVE_CLI_SERVICE_PROTOCOL_V8(7),
- HIVE_CLI_SERVICE_PROTOCOL_V9(8);
+ HIVE_CLI_SERVICE_PROTOCOL_V9(8),
+ HIVE_CLI_SERVICE_PROTOCOL_V10(9);
private final int value;
@@ -59,6 +60,8 @@ public static TProtocolVersion findByValue(int value) {
return HIVE_CLI_SERVICE_PROTOCOL_V8;
case 8:
return HIVE_CLI_SERVICE_PROTOCOL_V9;
+ case 9:
+ return HIVE_CLI_SERVICE_PROTOCOL_V10;
default:
return null;
}
diff --git a/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TTypeId.java b/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TTypeId.java
index a3735eb..1b062b7 100644
--- a/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TTypeId.java
+++ b/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TTypeId.java
@@ -33,7 +33,8 @@
VARCHAR_TYPE(18),
CHAR_TYPE(19),
INTERVAL_YEAR_MONTH_TYPE(20),
- INTERVAL_DAY_TIME_TYPE(21);
+ INTERVAL_DAY_TIME_TYPE(21),
+ TIMESTAMPTZ_TYPE(22);
private final int value;
@@ -98,6 +99,8 @@ public static TTypeId findByValue(int value) {
return INTERVAL_YEAR_MONTH_TYPE;
case 21:
return INTERVAL_DAY_TIME_TYPE;
+ case 22:
+ return TIMESTAMPTZ_TYPE;
default:
return null;
}
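Round-tripping the new constant through the generated lookup, as a sanity sketch:

    TTypeId id = TTypeId.findByValue(22);
    assert id == TTypeId.TIMESTAMPTZ_TYPE; // findByValue(23) would return null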
diff --git a/service-rpc/src/gen/thrift/gen-php/Types.php b/service-rpc/src/gen/thrift/gen-php/Types.php
index 786c773..4445092 100644
--- a/service-rpc/src/gen/thrift/gen-php/Types.php
+++ b/service-rpc/src/gen/thrift/gen-php/Types.php
@@ -25,6 +25,7 @@ final class TProtocolVersion {
const HIVE_CLI_SERVICE_PROTOCOL_V7 = 6;
const HIVE_CLI_SERVICE_PROTOCOL_V8 = 7;
const HIVE_CLI_SERVICE_PROTOCOL_V9 = 8;
+ const HIVE_CLI_SERVICE_PROTOCOL_V10 = 9;
static public $__names = array(
0 => 'HIVE_CLI_SERVICE_PROTOCOL_V1',
1 => 'HIVE_CLI_SERVICE_PROTOCOL_V2',
@@ -35,6 +36,7 @@ final class TProtocolVersion {
6 => 'HIVE_CLI_SERVICE_PROTOCOL_V7',
7 => 'HIVE_CLI_SERVICE_PROTOCOL_V8',
8 => 'HIVE_CLI_SERVICE_PROTOCOL_V9',
+ 9 => 'HIVE_CLI_SERVICE_PROTOCOL_V10',
);
}
@@ -61,6 +63,7 @@ final class TTypeId {
const CHAR_TYPE = 19;
const INTERVAL_YEAR_MONTH_TYPE = 20;
const INTERVAL_DAY_TIME_TYPE = 21;
+ const TIMESTAMPTZ_TYPE = 22;
static public $__names = array(
0 => 'BOOLEAN_TYPE',
1 => 'TINYINT_TYPE',
@@ -84,6 +87,7 @@ final class TTypeId {
19 => 'CHAR_TYPE',
20 => 'INTERVAL_YEAR_MONTH_TYPE',
21 => 'INTERVAL_DAY_TIME_TYPE',
+ 22 => 'TIMESTAMPTZ_TYPE',
);
}
@@ -9724,6 +9728,7 @@ final class Constant extends \Thrift\Type\TConstant {
19 => true,
20 => true,
21 => true,
+ 22 => true,
);
}
diff --git a/service-rpc/src/gen/thrift/gen-py/TCLIService/constants.py b/service-rpc/src/gen/thrift/gen-py/TCLIService/constants.py
index c8d4f8f..e002291 100644
--- a/service-rpc/src/gen/thrift/gen-py/TCLIService/constants.py
+++ b/service-rpc/src/gen/thrift/gen-py/TCLIService/constants.py
@@ -27,6 +27,7 @@
19,
20,
21,
+ 22,
])
COMPLEX_TYPES = set([
10,
diff --git a/service-rpc/src/gen/thrift/gen-py/TCLIService/ttypes.py b/service-rpc/src/gen/thrift/gen-py/TCLIService/ttypes.py
index fdf6b1f..d07ce10 100644
--- a/service-rpc/src/gen/thrift/gen-py/TCLIService/ttypes.py
+++ b/service-rpc/src/gen/thrift/gen-py/TCLIService/ttypes.py
@@ -26,6 +26,7 @@ class TProtocolVersion:
HIVE_CLI_SERVICE_PROTOCOL_V7 = 6
HIVE_CLI_SERVICE_PROTOCOL_V8 = 7
HIVE_CLI_SERVICE_PROTOCOL_V9 = 8
+ HIVE_CLI_SERVICE_PROTOCOL_V10 = 9
_VALUES_TO_NAMES = {
0: "HIVE_CLI_SERVICE_PROTOCOL_V1",
@@ -37,6 +38,7 @@ class TProtocolVersion:
6: "HIVE_CLI_SERVICE_PROTOCOL_V7",
7: "HIVE_CLI_SERVICE_PROTOCOL_V8",
8: "HIVE_CLI_SERVICE_PROTOCOL_V9",
+ 9: "HIVE_CLI_SERVICE_PROTOCOL_V10",
}
_NAMES_TO_VALUES = {
@@ -49,6 +51,7 @@ class TProtocolVersion:
"HIVE_CLI_SERVICE_PROTOCOL_V7": 6,
"HIVE_CLI_SERVICE_PROTOCOL_V8": 7,
"HIVE_CLI_SERVICE_PROTOCOL_V9": 8,
+ "HIVE_CLI_SERVICE_PROTOCOL_V10": 9,
}
class TTypeId:
@@ -74,6 +77,7 @@ class TTypeId:
CHAR_TYPE = 19
INTERVAL_YEAR_MONTH_TYPE = 20
INTERVAL_DAY_TIME_TYPE = 21
+ TIMESTAMPTZ_TYPE = 22
_VALUES_TO_NAMES = {
0: "BOOLEAN_TYPE",
@@ -98,6 +102,7 @@ class TTypeId:
19: "CHAR_TYPE",
20: "INTERVAL_YEAR_MONTH_TYPE",
21: "INTERVAL_DAY_TIME_TYPE",
+ 22: "TIMESTAMPTZ_TYPE",
}
_NAMES_TO_VALUES = {
@@ -123,6 +128,7 @@ class TTypeId:
"CHAR_TYPE": 19,
"INTERVAL_YEAR_MONTH_TYPE": 20,
"INTERVAL_DAY_TIME_TYPE": 21,
+ "TIMESTAMPTZ_TYPE": 22,
}
class TStatusCode:
diff --git a/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb b/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
index 25adbb4..b7bbebc 100644
--- a/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
+++ b/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
@@ -25,6 +25,7 @@ PRIMITIVE_TYPES = Set.new([
19,
20,
21,
+ 22,
])
COMPLEX_TYPES = Set.new([
diff --git a/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb b/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
index 4b1854c..80bc8ba 100644
--- a/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
+++ b/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
@@ -16,8 +16,9 @@ module TProtocolVersion
HIVE_CLI_SERVICE_PROTOCOL_V7 = 6
HIVE_CLI_SERVICE_PROTOCOL_V8 = 7
HIVE_CLI_SERVICE_PROTOCOL_V9 = 8
- VALUE_MAP = {0 => "HIVE_CLI_SERVICE_PROTOCOL_V1", 1 => "HIVE_CLI_SERVICE_PROTOCOL_V2", 2 => "HIVE_CLI_SERVICE_PROTOCOL_V3", 3 => "HIVE_CLI_SERVICE_PROTOCOL_V4", 4 => "HIVE_CLI_SERVICE_PROTOCOL_V5", 5 => "HIVE_CLI_SERVICE_PROTOCOL_V6", 6 => "HIVE_CLI_SERVICE_PROTOCOL_V7", 7 => "HIVE_CLI_SERVICE_PROTOCOL_V8", 8 => "HIVE_CLI_SERVICE_PROTOCOL_V9"}
- VALID_VALUES = Set.new([HIVE_CLI_SERVICE_PROTOCOL_V1, HIVE_CLI_SERVICE_PROTOCOL_V2, HIVE_CLI_SERVICE_PROTOCOL_V3, HIVE_CLI_SERVICE_PROTOCOL_V4, HIVE_CLI_SERVICE_PROTOCOL_V5, HIVE_CLI_SERVICE_PROTOCOL_V6, HIVE_CLI_SERVICE_PROTOCOL_V7, HIVE_CLI_SERVICE_PROTOCOL_V8, HIVE_CLI_SERVICE_PROTOCOL_V9]).freeze
+ HIVE_CLI_SERVICE_PROTOCOL_V10 = 9
+ VALUE_MAP = {0 => "HIVE_CLI_SERVICE_PROTOCOL_V1", 1 => "HIVE_CLI_SERVICE_PROTOCOL_V2", 2 => "HIVE_CLI_SERVICE_PROTOCOL_V3", 3 => "HIVE_CLI_SERVICE_PROTOCOL_V4", 4 => "HIVE_CLI_SERVICE_PROTOCOL_V5", 5 => "HIVE_CLI_SERVICE_PROTOCOL_V6", 6 => "HIVE_CLI_SERVICE_PROTOCOL_V7", 7 => "HIVE_CLI_SERVICE_PROTOCOL_V8", 8 => "HIVE_CLI_SERVICE_PROTOCOL_V9", 9 => "HIVE_CLI_SERVICE_PROTOCOL_V10"}
+ VALID_VALUES = Set.new([HIVE_CLI_SERVICE_PROTOCOL_V1, HIVE_CLI_SERVICE_PROTOCOL_V2, HIVE_CLI_SERVICE_PROTOCOL_V3, HIVE_CLI_SERVICE_PROTOCOL_V4, HIVE_CLI_SERVICE_PROTOCOL_V5, HIVE_CLI_SERVICE_PROTOCOL_V6, HIVE_CLI_SERVICE_PROTOCOL_V7, HIVE_CLI_SERVICE_PROTOCOL_V8, HIVE_CLI_SERVICE_PROTOCOL_V9, HIVE_CLI_SERVICE_PROTOCOL_V10]).freeze
end
module TTypeId
@@ -43,8 +44,9 @@ module TTypeId
CHAR_TYPE = 19
INTERVAL_YEAR_MONTH_TYPE = 20
INTERVAL_DAY_TIME_TYPE = 21
- VALUE_MAP = {0 => "BOOLEAN_TYPE", 1 => "TINYINT_TYPE", 2 => "SMALLINT_TYPE", 3 => "INT_TYPE", 4 => "BIGINT_TYPE", 5 => "FLOAT_TYPE", 6 => "DOUBLE_TYPE", 7 => "STRING_TYPE", 8 => "TIMESTAMP_TYPE", 9 => "BINARY_TYPE", 10 => "ARRAY_TYPE", 11 => "MAP_TYPE", 12 => "STRUCT_TYPE", 13 => "UNION_TYPE", 14 => "USER_DEFINED_TYPE", 15 => "DECIMAL_TYPE", 16 => "NULL_TYPE", 17 => "DATE_TYPE", 18 => "VARCHAR_TYPE", 19 => "CHAR_TYPE", 20 => "INTERVAL_YEAR_MONTH_TYPE", 21 => "INTERVAL_DAY_TIME_TYPE"}
- VALID_VALUES = Set.new([BOOLEAN_TYPE, TINYINT_TYPE, SMALLINT_TYPE, INT_TYPE, BIGINT_TYPE, FLOAT_TYPE, DOUBLE_TYPE, STRING_TYPE, TIMESTAMP_TYPE, BINARY_TYPE, ARRAY_TYPE, MAP_TYPE, STRUCT_TYPE, UNION_TYPE, USER_DEFINED_TYPE, DECIMAL_TYPE, NULL_TYPE, DATE_TYPE, VARCHAR_TYPE, CHAR_TYPE, INTERVAL_YEAR_MONTH_TYPE, INTERVAL_DAY_TIME_TYPE]).freeze
+ TIMESTAMPTZ_TYPE = 22
+ VALUE_MAP = {0 => "BOOLEAN_TYPE", 1 => "TINYINT_TYPE", 2 => "SMALLINT_TYPE", 3 => "INT_TYPE", 4 => "BIGINT_TYPE", 5 => "FLOAT_TYPE", 6 => "DOUBLE_TYPE", 7 => "STRING_TYPE", 8 => "TIMESTAMP_TYPE", 9 => "BINARY_TYPE", 10 => "ARRAY_TYPE", 11 => "MAP_TYPE", 12 => "STRUCT_TYPE", 13 => "UNION_TYPE", 14 => "USER_DEFINED_TYPE", 15 => "DECIMAL_TYPE", 16 => "NULL_TYPE", 17 => "DATE_TYPE", 18 => "VARCHAR_TYPE", 19 => "CHAR_TYPE", 20 => "INTERVAL_YEAR_MONTH_TYPE", 21 => "INTERVAL_DAY_TIME_TYPE", 22 => "TIMESTAMPTZ_TYPE"}
+ VALID_VALUES = Set.new([BOOLEAN_TYPE, TINYINT_TYPE, SMALLINT_TYPE, INT_TYPE, BIGINT_TYPE, FLOAT_TYPE, DOUBLE_TYPE, STRING_TYPE, TIMESTAMP_TYPE, BINARY_TYPE, ARRAY_TYPE, MAP_TYPE, STRUCT_TYPE, UNION_TYPE, USER_DEFINED_TYPE, DECIMAL_TYPE, NULL_TYPE, DATE_TYPE, VARCHAR_TYPE, CHAR_TYPE, INTERVAL_YEAR_MONTH_TYPE, INTERVAL_DAY_TIME_TYPE, TIMESTAMPTZ_TYPE]).freeze
end
module TStatusCode
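The PHP, Python, and Ruby bindings above are regenerated Thrift output; the substance is just two new wire values (protocol version 9 and type id 22). A minimal sketch of how a Java client would see them, assuming the matching regenerated Java bindings in org.apache.hive.service.rpc.thrift (Thrift-generated Java enums expose findByValue(int)):

    import org.apache.hive.service.rpc.thrift.TProtocolVersion;
    import org.apache.hive.service.rpc.thrift.TTypeId;

    public class WireValueCheck {
      public static void main(String[] args) {
        // Wire value 22 maps to the new type, wire value 9 to the new protocol version.
        TTypeId tzType = TTypeId.findByValue(22);               // TIMESTAMPTZ_TYPE
        TProtocolVersion v10 = TProtocolVersion.findByValue(9); // HIVE_CLI_SERVICE_PROTOCOL_V10
        System.out.println(tzType + " is usable from " + v10 + " onwards");
      }
    }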
diff --git a/service/src/java/org/apache/hive/service/cli/ColumnValue.java b/service/src/java/org/apache/hive/service/cli/ColumnValue.java
index 76e8c03..7195f87 100644
--- a/service/src/java/org/apache/hive/service/cli/ColumnValue.java
+++ b/service/src/java/org/apache/hive/service/cli/ColumnValue.java
@@ -191,6 +191,7 @@ public static TColumnValue toTColumnValue(TypeDescriptor typeDescriptor, Object
case DATE_TYPE:
return dateValue((Date)value);
case TIMESTAMP_TYPE:
+ case TIMESTAMPTZ_TYPE:
return timestampValue((Timestamp)value);
case INTERVAL_YEAR_MONTH_TYPE:
return stringValue((HiveIntervalYearMonth) value);
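Routing TIMESTAMPTZ_TYPE through the existing timestampValue(...) case works because the runtime object is a HiveTimestamp, a java.sql.Timestamp subclass, and (assuming timestampValue serializes via toString(), as the string-based Thrift columns do) the zone suffix rides along in the rendered string. A sketch of the polymorphism being relied on:

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.common.type.HiveTimestamp;

    public class ToStringDemo {
      public static void main(String[] args) {
        // Statically a Timestamp, dynamically a HiveTimestamp: toString()
        // dispatches to the override that appends the normalized zone ID.
        Timestamp t = new HiveTimestamp(0L, "GMT+05:30");
        System.out.println(t); // 1970-01-01 05:30:00.0 GMT+05:30
      }
    }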
diff --git a/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java b/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
index d634bef..ad61fc3 100644
--- a/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
+++ b/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
@@ -116,6 +116,8 @@ public Integer getColumnSize() {
return 10;
case TIMESTAMP_TYPE:
return 29;
+ case TIMESTAMPTZ_TYPE:
+ // 29 chars for the timestamp itself plus up to 10 for a " GMT+XX:XX" suffix
+ return 39;
default:
return null;
}
diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/type/HiveTimestamp.java b/storage-api/src/java/org/apache/hadoop/hive/common/type/HiveTimestamp.java
new file mode 100644
index 0000000..e5e7feb
--- /dev/null
+++ b/storage-api/src/java/org/apache/hadoop/hive/common/type/HiveTimestamp.java
@@ -0,0 +1,206 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import org.apache.commons.math3.util.Pair;
+
+import java.sql.Timestamp;
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.TimeZone;
+
+/**
+ * A subclass of java.sql.Timestamp that additionally stores a timezone offset.
+ * Any timestamp that must be interpreted in a specific timezone should use this type.
+ */
+public class HiveTimestamp extends Timestamp {
+ private static final ThreadLocal<DateFormat> threadLocalDateFormat =
+ new ThreadLocal<DateFormat>() {
+ @Override
+ protected DateFormat initialValue() {
+ return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+ }
+ };
+
+ // The offset from UTC, in minutes. The valid range is [-12:00, +14:00].
+ private int offsetInMin;
+
+ // Cached normalized zone ID; rebuilt lazily by getTimezoneID()
+ private transient String internalID = null;
+
+ private static final int MAX_OFFSET = 840;
+ private static final int MIN_OFFSET = -720;
+
+ // Sentinel value (outside the valid range) indicating that no offset is present
+ public static final int NULL_OFFSET = -800;
+
+ public HiveTimestamp(long time, String timezoneID) {
+ super(time);
+ offsetInMin = computeOffset(timezoneID);
+ }
+
+ public HiveTimestamp(long time, int offsetInMin) {
+ super(time);
+ this.offsetInMin = validateOffset(offsetInMin);
+ }
+
+ // Resolves the named zone to a fixed offset at this timestamp's instant (DST-aware).
+ private int computeOffset(String timezoneID) {
+ validateTimezoneID(timezoneID);
+ TimeZone tz = TimeZone.getTimeZone(timezoneID);
+ return tz.getOffset(getTime()) / 1000 / 60;
+ }
+
+ public int getOffsetInMin() {
+ return offsetInMin;
+ }
+
+ public void setOffsetInMin(int offsetInMin) {
+ this.offsetInMin = validateOffset(offsetInMin);
+ internalID = null;
+ }
+
+ // Lazily renders the stored offset as a normalized "GMT" or "GMT+XX:XX" ID.
+ private String getTimezoneID() {
+ if (internalID == null) {
+ StringBuilder builder = new StringBuilder("GMT");
+ if (offsetInMin != 0) {
+ builder.append(offsetInMin > 0 ? "+" : "-");
+ int tmp = Math.abs(offsetInMin);
+ int offsetHour = tmp / 60;
+ int offsetMin = tmp % 60;
+ builder.append(String.format("%02d", offsetHour)).append(":").
+ append(String.format("%02d", offsetMin));
+ }
+ internalID = builder.toString();
+ }
+ return internalID;
+ }
+
+ private static void validateTimezoneID(String timezoneID) {
+ if (timezoneID == null) {
+ throw new IllegalArgumentException("Timezone ID is null");
+ }
+ TimeZone tz = TimeZone.getTimeZone(timezoneID);
+ // TimeZone.getTimeZone() silently falls back to GMT for unknown IDs,
+ // so treat that as invalid unless GMT itself was requested
+ if (tz.getID().equals("GMT") && !tz.getID().equals(timezoneID)) {
+ throw new IllegalArgumentException("Unknown timezoneID: " + timezoneID);
+ }
+ }
+
+ @Override
+ public String toString() {
+ // super.toString() is "yyyy-mm-dd hh:mm:ss[.fffffffff]"; keep the fractional
+ // part (index 19 onwards) and re-render the date/time in the stored offset.
+ String ts = super.toString();
+ DateFormat dateFormat = threadLocalDateFormat.get();
+ TimeZone defaultTZ = dateFormat.getTimeZone();
+ try {
+ String timezoneID = getTimezoneID();
+ dateFormat.setTimeZone(TimeZone.getTimeZone(timezoneID));
+ String r = dateFormat.format(this) + ts.substring(19);
+ r += " " + timezoneID;
+ return r;
+ } finally {
+ dateFormat.setTimeZone(defaultTZ);
+ }
+ }
+
+ @Override
+ public int compareTo(Timestamp ts) {
+ int result = super.compareTo(ts);
+ if (result == 0) {
+ if (ts instanceof HiveTimestamp) {
+ result = offsetInMin - ((HiveTimestamp) ts).offsetInMin;
+ } else {
+ // An otherwise-equal plain Timestamp sorts before a zoned one.
+ result = 1;
+ }
+ }
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o instanceof Timestamp) {
+ return compareTo((Timestamp) o) == 0;
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ return super.hashCode() ^ offsetInMin;
+ }
+
+ public static HiveTimestamp valueOf(String timestamp) {
+ Pair<String, String> pair = extractTimezoneID(timestamp);
+ return valueOf(pair.getFirst(), pair.getSecond());
+ }
+
+ // Parses as a HiveTimestamp when a timezone ID is present; otherwise falls back to a plain Timestamp.
+ public static Timestamp parseWithFallback(String timestamp) {
+ Pair<String, String> pair = extractTimezoneID(timestamp);
+ if (pair.getSecond() == null) {
+ return Timestamp.valueOf(timestamp);
+ }
+ return valueOf(pair.getFirst(), pair.getSecond());
+ }
+
+ public static HiveTimestamp valueOf(String timestamp, String timezoneID) {
+ Timestamp ts = Timestamp.valueOf(timestamp);
+ validateTimezoneID(timezoneID);
+ DateFormat dateFormat = threadLocalDateFormat.get();
+ TimeZone defaultTZ = dateFormat.getTimeZone();
+ try {
+ // The format only covers whole seconds, so nanos are taken from
+ // Timestamp.valueOf() above and restored after re-parsing in the target zone.
+ int nanos = ts.getNanos();
+ dateFormat.setTimeZone(TimeZone.getTimeZone(timezoneID));
+ Date date = dateFormat.parse(timestamp);
+ HiveTimestamp hiveTimestamp = new HiveTimestamp(date.getTime(), timezoneID);
+ hiveTimestamp.setNanos(nanos);
+ return hiveTimestamp;
+ } catch (ParseException e) {
+ throw new IllegalArgumentException(e);
+ } finally {
+ dateFormat.setTimeZone(defaultTZ);
+ }
+ }
+
+ // Splits s into the timestamp part and an optional timezone ID
+ // (everything after the second space, if any).
+ private static Pair<String, String> extractTimezoneID(String s) {
+ s = s.trim();
+ int divide = s.indexOf(' ');
+ if (divide != -1) {
+ divide = s.indexOf(' ', divide + 1);
+ if (divide != -1) {
+ return new Pair<>(s.substring(0, divide), s.substring(divide + 1));
+ }
+ }
+ return new Pair<>(s, null);
+ }
+
+ public static boolean isValidOffset(int offsetInMin) {
+ return offsetInMin >= MIN_OFFSET && offsetInMin <= MAX_OFFSET;
+ }
+
+ private static int validateOffset(int offsetInMin) {
+ if (!isValidOffset(offsetInMin) && offsetInMin != NULL_OFFSET) {
+ throw new IllegalArgumentException("Timezone offset out of range: " + offsetInMin);
+ }
+ return offsetInMin;
+ }
+}
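For orientation, a short usage sketch of the class above (illustrative names only):

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.common.type.HiveTimestamp;

    public class HiveTimestampDemo {
      public static void main(String[] args) {
        // Zone IDs are resolved to a fixed offset at construction time,
        // so the printed ID is always a normalized "GMT+XX:XX".
        HiveTimestamp ts = HiveTimestamp.valueOf("2017-05-01 10:30:00.123", "Asia/Kolkata");
        System.out.println(ts); // 2017-05-01 10:30:00.123 GMT+05:30

        // Without a zone suffix in the string, parseWithFallback degrades
        // to a plain java.sql.Timestamp.
        Timestamp plain = HiveTimestamp.parseWithFallback("2017-05-01 10:30:00.123");
        System.out.println(plain instanceof HiveTimestamp); // false
      }
    }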
diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/util/JavaDataModel.java b/storage-api/src/java/org/apache/hadoop/hive/ql/util/JavaDataModel.java
index 4a745e4..f002703 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/ql/util/JavaDataModel.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/ql/util/JavaDataModel.java
@@ -307,6 +307,9 @@ public int lengthForBooleanArrayOfSize(int length) {
public int lengthForTimestampArrayOfSize(int length) {
return lengthForPrimitiveArrayOfSize(lengthOfTimestamp(), length);
}
+ public int lengthForHiveTimestampArrayOfSize(int length) {
+ return lengthForPrimitiveArrayOfSize(lengthOfHiveTimestamp(), length);
+ }
public int lengthForDateArrayOfSize(int length) {
return lengthForPrimitiveArrayOfSize(lengthOfDate(), length);
}
@@ -334,6 +337,10 @@ public int lengthOfTimestamp() {
return object() + primitive2();
}
+ public int lengthOfHiveTimestamp() {
+ // a Timestamp plus primitive2() for the int offsetInMin field (alignment included)
+ return lengthOfTimestamp() + primitive2();
+ }
+
public int lengthOfDate() {
// object overhead + 8 bytes for long (fastTime) + 16 bytes for cdate
return object() + 3 * primitive2();
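The sizing helpers feed Hive's in-memory statistics. A hedged sketch of how they would be consulted (JavaDataModel.get() returns the model for the running JVM):

    import org.apache.hadoop.hive.ql.util.JavaDataModel;

    public class SizeEstimate {
      public static void main(String[] args) {
        JavaDataModel model = JavaDataModel.get();
        // A HiveTimestamp costs one extra primitive2() over a plain
        // Timestamp, covering the int offsetInMin field plus alignment.
        System.out.println(model.lengthOfHiveTimestamp());
        System.out.println(model.lengthForHiveTimestampArrayOfSize(1024));
      }
    }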
diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java b/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java
index 41db9ca..66b934d 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java
@@ -19,6 +19,9 @@
package org.apache.hadoop.hive.ql.util;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveTimestamp;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.math.BigDecimal;
import java.sql.Timestamp;
@@ -27,6 +30,7 @@
 * Utilities for Timestamps and the relevant conversions.
*/
public class TimestampUtils {
+ private static final Logger LOG = LoggerFactory.getLogger(TimestampUtils.class);
public static final BigDecimal BILLION_BIG_DECIMAL = BigDecimal.valueOf(1000000000);
/**
@@ -99,4 +103,15 @@ public static long millisToSeconds(long millis) {
return (millis - 999) / 1000;
}
}
+
+ public static HiveTimestamp getHiveTimestampOrNull(String s) {
+ HiveTimestamp result;
+ try {
+ result = HiveTimestamp.valueOf(s);
+ } catch (IllegalArgumentException e) {
+ LOG.debug("Invalid string " + s + " for TIMESTAMPTZ", e);
+ result = null;
+ }
+ return result;
+ }
}
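A brief usage sketch of the new helper, which follows Hive's cast-to-null convention of returning null (and logging at DEBUG) rather than throwing on malformed input:

    import org.apache.hadoop.hive.common.type.HiveTimestamp;
    import org.apache.hadoop.hive.ql.util.TimestampUtils;

    public class ParseOrNullDemo {
      public static void main(String[] args) {
        HiveTimestamp ok = TimestampUtils.getHiveTimestampOrNull("2017-05-01 10:30:00 GMT+08:00");
        HiveTimestamp bad = TimestampUtils.getHiveTimestampOrNull("not a timestamp");
        System.out.println(ok);  // 2017-05-01 10:30:00.0 GMT+08:00
        System.out.println(bad); // null
      }
    }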