diff --git accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java index 926f572..761d432 100644 --- accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java +++ accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java @@ -23,7 +23,7 @@ import java.io.DataInputStream; import java.io.DataOutputStream; import java.sql.Date; -import java.sql.Timestamp; +import java.time.LocalDateTime; import java.util.Map.Entry; import org.apache.accumulo.core.client.BatchWriter; @@ -42,6 +42,7 @@ import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.ByteStream; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -242,9 +243,9 @@ public void testBinaryTypes() throws Exception { // tiemestamp baos.reset(); - Timestamp timestampValue = new Timestamp(now.getTime()); + Timestamp timestampValue = new Timestamp(LocalDateTime.now()); ByteStream.Output output = new ByteStream.Output(); - TimestampWritable timestampWritable = new TimestampWritable(new Timestamp(now.getTime())); + TimestampWritable timestampWritable = new TimestampWritable(new Timestamp(LocalDateTime.now())); timestampWritable.write(new DataOutputStream(output)); output.close(); m.put(cfBytes, "timestamp".getBytes(), output.toByteArray()); @@ -598,7 +599,7 @@ public void testUtf8Types() throws Exception { m.put(cfBytes, "date".getBytes(), baos.toByteArray()); // timestamp - Timestamp timestampValue = new Timestamp(now.getTime()); + Timestamp timestampValue = new Timestamp(LocalDateTime.now()); baos.reset(); JavaTimestampObjectInspector timestampOI = (JavaTimestampObjectInspector) PrimitiveObjectInspectorFactory .getPrimitiveJavaObjectInspector(TypeInfoFactory diff --git common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java index c49aefd..94f06a8 100644 --- common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java +++ common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hive.common.type; -import java.sql.Timestamp; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.time.DateTimeException; @@ -129,10 +128,15 @@ public static TimestampTZ parseOrNull(String s, ZoneId defaultTimeZone) { // Converts Date to TimestampTZ. The conversion is done text-wise since // Date/Timestamp should be treated as description of date/time. public static TimestampTZ convert(Date date, ZoneId defaultTimeZone) { - String s = date instanceof Timestamp ? date.toString() : CONVERT_FORMATTER.get().format(date); + String s = date instanceof java.sql.Timestamp ? date.toString() : CONVERT_FORMATTER.get().format(date); return parse(s, defaultTimeZone); } + // Converts Timestamp to TimestampTZ. 
+ public static TimestampTZ convert(Timestamp ts, ZoneId defaultTimeZone) { + return parse(ts.toString(), defaultTimeZone); + } + public static ZoneId parseTimeZone(String timeZoneStr) { if (timeZoneStr == null || timeZoneStr.trim().isEmpty() || timeZoneStr.trim().toLowerCase().equals("local")) { diff --git common/src/java/org/apache/hive/common/util/TimestampParser.java common/src/java/org/apache/hive/common/util/TimestampParser.java index db7ea25..141f48e 100644 --- common/src/java/org/apache/hive/common/util/TimestampParser.java +++ common/src/java/org/apache/hive/common/util/TimestampParser.java @@ -18,17 +18,16 @@ package org.apache.hive.common.util; -import java.math.BigDecimal; -import java.sql.Timestamp; import java.util.Arrays; -import java.util.Iterator; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.apache.hadoop.hive.common.type.Timestamp; import org.joda.time.DateTime; -import org.joda.time.MutableDateTime; import org.joda.time.DateTimeFieldType; +import org.joda.time.MutableDateTime; +import org.joda.time.chrono.ISOChronology; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.DateTimeFormatterBuilder; @@ -46,7 +45,8 @@ protected final static String[] stringArray = new String[] {}; protected final static String millisFormatString = "millis"; - protected final static DateTime startingDateValue = new DateTime(1970, 1, 1, 0, 0, 0, 0); + protected final static DateTime startingDateValue = + new DateTime(1970, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()); protected String[] formatStrings = null; protected DateTimeFormatter fmt = null; @@ -91,14 +91,15 @@ public TimestampParser(String[] formatStrings) { public Timestamp parseTimestamp(String strValue) throws IllegalArgumentException { if (fmt != null) { // reset value in case any date fields are missing from the date pattern - MutableDateTime mdt = new MutableDateTime(startingDateValue); + MutableDateTime mdt = new MutableDateTime( + startingDateValue, ISOChronology.getInstanceUTC()); // Using parseInto() avoids throwing exception when parsing, // allowing fallback to default timestamp parsing if custom patterns fail. 
int ret = fmt.parseInto(mdt, strValue, 0); // Only accept parse results if we parsed the entire string if (ret == strValue.length()) { - return new Timestamp(mdt.getMillis()); + return Timestamp.ofEpochMilli(mdt.getMillis()); } } @@ -141,7 +142,8 @@ public int parseInto(DateTimeParserBucket bucket, String text, int position) { // Joda DateTime only has precision to millis, cut off any fractional portion long millis = Long.parseLong(matcher.group(1)); - DateTime dt = new DateTime(millis); + DateTime dt = + new DateTime(millis, ISOChronology.getInstanceUTC()); for (DateTimeFieldType field : dateTimeFields) { bucket.saveField(field, dt.get(field)); } diff --git common/src/test/org/apache/hive/common/util/TestTimestampParser.java common/src/test/org/apache/hive/common/util/TestTimestampParser.java index 4917603..baaf346 100644 --- common/src/test/org/apache/hive/common/util/TestTimestampParser.java +++ common/src/test/org/apache/hive/common/util/TestTimestampParser.java @@ -18,11 +18,10 @@ package org.apache.hive.common.util; -import java.sql.Timestamp; -import java.util.Arrays; -import java.util.List; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; -import static org.junit.Assert.*; +import org.apache.hadoop.hive.common.type.Timestamp; import org.junit.Test; public class TestTimestampParser { @@ -133,10 +132,10 @@ public void testMillisParser() { TimestampParser tp = new TimestampParser(patterns); ValidTimestampCase[] validCases = { - new ValidTimestampCase("0", new Timestamp(0)), - new ValidTimestampCase("-1000000", new Timestamp(-1000000)), - new ValidTimestampCase("1420509274123", new Timestamp(1420509274123L)), - new ValidTimestampCase("1420509274123.456789", new Timestamp(1420509274123L)), + new ValidTimestampCase("0", Timestamp.ofEpochMilli(0)), + new ValidTimestampCase("-1000000", Timestamp.ofEpochMilli(-1000000)), + new ValidTimestampCase("1420509274123", Timestamp.ofEpochMilli(1420509274123L)), + new ValidTimestampCase("1420509274123.456789", Timestamp.ofEpochMilli(1420509274123L)), // Other format pattern should also work new ValidTimestampCase("1945-12-31T23:59:59", diff --git druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java index 4a7952e..1f59fe5 100644 --- druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java +++ druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java @@ -510,7 +510,7 @@ public Writable serialize(Object o, ObjectInspector objectInspector) throws SerD } value.put(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME, ((TimestampObjectInspector) fields.get(columns.length).getFieldObjectInspector()) - .getPrimitiveJavaObject(values.get(columns.length)).getTime() + .getPrimitiveJavaObject(values.get(columns.length)).getMillis() ); return new DruidWritable(value); } diff --git druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java index 2d351e4..51c0a3e 100644 --- druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java +++ druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java @@ -27,7 +27,6 @@ import java.io.IOException; import java.io.InputStream; import java.lang.reflect.InvocationTargetException; -import java.sql.Timestamp; import java.time.Instant; import java.time.ZoneOffset; import java.util.ArrayList; @@ -35,20 +34,12 @@ import java.util.Map.Entry; 
import java.util.Properties; -import com.fasterxml.jackson.core.type.TypeReference; -import com.google.common.util.concurrent.SettableFuture; -import com.metamx.http.client.HttpClient; -import com.metamx.http.client.response.HttpResponseHandler; -import io.druid.data.input.Row; -import io.druid.query.Result; -import io.druid.query.select.SelectResultValue; -import io.druid.query.timeseries.TimeseriesResultValue; -import io.druid.query.topn.TopNResultValue; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.conf.Constants; import org.apache.hadoop.hive.druid.DruidStorageHandlerUtils; @@ -79,17 +70,25 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; - import org.junit.Before; import org.junit.Test; import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonMappingException; import com.google.common.base.Function; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import com.google.common.util.concurrent.SettableFuture; +import com.metamx.http.client.HttpClient; +import com.metamx.http.client.response.HttpResponseHandler; +import io.druid.data.input.Row; import io.druid.query.Query; +import io.druid.query.Result; +import io.druid.query.select.SelectResultValue; +import io.druid.query.timeseries.TimeseriesResultValue; +import io.druid.query.topn.TopNResultValue; /** * Basic tests for Druid SerDe. 
The examples are taken from Druid 0.9.1.1 @@ -845,7 +844,7 @@ private void deserializeQueryResults(DruidSerDe serDe, String queryType, String new IntWritable(1112123), new ShortWritable((short) 12), new ByteWritable((byte) 0), - new TimestampWritable(new Timestamp(1377907200000L)) // granularity + new TimestampWritable(Timestamp.ofEpochSecond(1377907200L)) // granularity }; private static final DruidWritable DRUID_WRITABLE = new DruidWritable( ImmutableMap.builder() diff --git hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java index c6f3b0f..3c657d5 100644 --- hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java +++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java @@ -102,7 +102,7 @@ public Writable serialize(Object obj, ObjectInspector objInspector) throws Excep timestamp = ((LongObjectInspector)inspector).get(value); } else { PrimitiveObjectInspector primitive = (PrimitiveObjectInspector) inspector; - timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getTime(); + timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getMillis(); } } diff --git hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java index 1ef4545..ece3d3a 100644 --- hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java +++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java @@ -348,7 +348,7 @@ private long getTimestampVal(IndexSearchCondition sc) throws IOException { timestamp = ((LongObjectInspector)inspector).get(value); } else { PrimitiveObjectInspector primitive = (PrimitiveObjectInspector) inspector; - timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getTime(); + timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getMillis(); } } catch (HiveException e) { throw new IOException(e); diff --git hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java index ffb005e..7b7c170 100644 --- hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java +++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java @@ -23,6 +23,7 @@ import java.util.List; import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping; import org.apache.hadoop.hive.hbase.struct.HBaseValueFactory; import org.apache.hadoop.hive.serde2.SerDeException; @@ -161,7 +162,8 @@ private Object uncheckedGetField(int fieldID) { } LazyObjectBase lz = fields[fieldID]; if (lz instanceof LazyTimestamp) { - ((LazyTimestamp) lz).getWritableObject().setTime(timestamp); + ((LazyTimestamp) lz).getWritableObject().set( + Timestamp.ofEpochMilli(timestamp)); } else { ((LazyLong) lz).getWritableObject().set(timestamp); } diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java index 17f9909..fcd896c 100644 --- hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java +++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java @@ -21,7 +21,6 @@ import java.io.ByteArrayInputStream; import java.io.IOException; import java.sql.Date; -import 
java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -36,6 +35,7 @@ import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.AbstractSerDe; diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java index d03ca3d..499c8e8 100644 --- hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java +++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; @@ -217,7 +218,7 @@ public static void writeDatum(DataOutput out, Object val) throws IOException { new DateWritable((Date)val).write(out); return; case DataType.TIMESTAMP: - new TimestampWritable((java.sql.Timestamp)val).write(out); + new TimestampWritable((Timestamp)val).write(out); return; default: throw new IOException("Unexpected data type " + type + diff --git ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt index 2e66b3a..0ec7076 100644 --- ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; @@ -47,7 +46,7 @@ public class extends VectorExpression { this.colNum1 = colNum1; this.colNum2 = colNum2; this.outputColumn = outputColumn; - scratchTimestamp1 = new Timestamp(0); + scratchTimestamp1 = new Timestamp(); } public () { @@ -93,12 +92,12 @@ public class extends VectorExpression { * conditional checks in the inner loop. 
*/ if (inputColVector1.isRepeating && inputColVector2.isRepeating) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); dtm.( scratchTimestamp1, inputColVector2.asScratch(0), outputColVector.getScratch()); outputColVector.setFromScratch(0); } else if (inputColVector1.isRepeating) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; @@ -118,14 +117,14 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, value2, outputColVector.getScratch()); outputColVector.setFromScratch(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, value2, outputColVector.getScratch()); outputColVector.setFromScratch(i); @@ -135,14 +134,14 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, inputColVector2.asScratch(i), outputColVector.getScratch()); outputColVector.setFromScratch(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, inputColVector2.asScratch(i), outputColVector.getScratch()); outputColVector.setFromScratch(i); diff --git ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt index e679449..2bf1932 100644 --- ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt +++ ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -48,7 +47,7 @@ public class extends VectorExpression { this.colNum = colNum; this.value = value; this.outputColumn = outputColumn; - scratchTimestamp1 = new Timestamp(0); + scratchTimestamp1 = new Timestamp(); } public () { @@ -81,7 +80,7 @@ public class extends VectorExpression { } if (inputColVector1.isRepeating) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); dtm.( scratchTimestamp1, value, outputColVector.getScratch()); outputColVector.setFromScratch(0); @@ -91,14 +90,14 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - 
scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, value, outputColVector.getScratch()); outputColVector.setFromScratch(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, value, outputColVector.getScratch()); outputColVector.setFromScratch(i); @@ -108,7 +107,7 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, value, outputColVector.getScratch()); outputColVector.setFromScratch(i); @@ -116,7 +115,7 @@ public class extends VectorExpression { } } else { for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, value, outputColVector.getScratch()); outputColVector.setFromScratch(i); diff --git ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt index 85d88fd..e2762d6 100644 --- ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; @@ -54,8 +53,8 @@ public class extends VectorExpression { public (long value, int colNum, int outputColumn) { this.colNum = colNum; // Scalar input #1 is type date (days). For the math we convert it to a timestamp. 
- this.value = new Timestamp(0); - this.value.setTime(DateWritable.daysToMillis((int) value)); + this.value = new Timestamp(); + this.value.setTimeInMillis(DateWritable.daysToMillis((int) value)); this.outputColumn = outputColumn; } diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt index 9d5432f..946fdb0 100644 --- ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt +++ ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt @@ -26,7 +26,7 @@ import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampColumn.txt ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampColumn.txt index 610c062..238f4b9 100644 --- ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampScalar.txt ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampScalar.txt index 1b86691..2394fb3 100644 --- ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampScalar.txt +++ ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampScalar.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.*; diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleScalarCompareTimestampColumn.txt ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleScalarCompareTimestampColumn.txt index 73c46a1..44da85e 100644 --- ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleScalarCompareTimestampColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleScalarCompareTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnBetween.txt ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnBetween.txt index 
9c268e2..739fec7 100644 --- ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnBetween.txt +++ ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnBetween.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampColumn.txt ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampColumn.txt index eeb73c9..b5de86a 100644 --- ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt index 23790a5..41d0a22 100644 --- ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt +++ ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareLongDoubleColumn.txt ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareLongDoubleColumn.txt index 0e10779..9f83e0c 100644 --- ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareLongDoubleColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareLongDoubleColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.*; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt index 5a6def3..3f41705 100644 --- ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import 
org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; diff --git ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampColumn.txt ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampColumn.txt index 9eba829..ec24167 100644 --- ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; diff --git ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt index 9a06822..9d2e56d 100644 --- ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt +++ ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; diff --git ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticTimestampColumn.txt ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticTimestampColumn.txt index 9a0d397..86ec33d 100644 --- ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticTimestampColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; diff --git ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampColumn.txt ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampColumn.txt index cff2deb..8ee3250 100644 --- ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; import org.apache.hadoop.hive.ql.exec.vector.*; diff --git ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt 
ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt index 8308a30..a1db5bb 100644 --- ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt +++ ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.*; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; diff --git ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt index 7aeff81..73cee07 100644 --- ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; @@ -47,7 +46,7 @@ public class extends VectorExpression { this.colNum1 = colNum1; this.colNum2 = colNum2; this.outputColumn = outputColumn; - scratchTimestamp2 = new Timestamp(0); + scratchTimestamp2 = new Timestamp(); } public () { @@ -94,7 +93,7 @@ public class extends VectorExpression { * conditional checks in the inner loop. */ if (inputColVector1.isRepeating && inputColVector2.isRepeating) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); dtm.( inputColVector1.asScratch(0), scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(0); @@ -103,21 +102,21 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value1, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value1, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); } } } else if (inputColVector2.isRepeating) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; @@ -136,14 +135,14 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( inputColVector1.asScratch(i), scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); } } else { for(int i = 0; i != n; i++) { - 
scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( inputColVector1.asScratch(i), scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); diff --git ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt index f8cb880..faba352 100644 --- ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt +++ ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; @@ -45,8 +44,8 @@ public class extends VectorExpression { public (int colNum, long value, int outputColumn) { this.colNum = colNum; - this.value = new Timestamp(0); - this.value.setTime(DateWritable.daysToMillis((int) value)); + this.value = new Timestamp(); + this.value.setTimeInMillis(DateWritable.daysToMillis((int) value)); this.outputColumn = outputColumn; } diff --git ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthColumn.txt ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthColumn.txt index 989e2f5..c52295e 100644 --- ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; diff --git ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampColumn.txt ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampColumn.txt index ad43cac..016b24c 100644 --- ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; diff --git ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt index 32b49a3..1ff124f 100644 --- ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt +++ ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import 
org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; diff --git ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampColumn.txt ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampColumn.txt index 2710fa4..d38b924 100644 --- ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; diff --git ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt index 32647f2..f17c557 100644 --- ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt +++ ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.*; diff --git ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt index dea4db2..7367e49 100644 --- ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; @@ -56,7 +55,7 @@ public class extends VectorExpression { this.colNum = colNum; this.value = value; this.outputColumn = outputColumn; - scratchTimestamp2 = new Timestamp(0); + scratchTimestamp2 = new Timestamp(); } public () { @@ -95,7 +94,7 @@ public class extends VectorExpression { } if (inputColVector2.isRepeating) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); dtm.( value, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(0); @@ -105,14 +104,14 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); } } else { for(int i = 0; i != n; i++) { - 
scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); @@ -122,7 +121,7 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); @@ -130,7 +129,7 @@ public class extends VectorExpression { } } else { for(int i = 0; i != n; i++) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); diff --git ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt index e82b9e2..643b9f0 100644 --- ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; diff --git ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt index 0d8a26b..2374480 100644 --- ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; diff --git ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareLongDoubleColumn.txt ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareLongDoubleColumn.txt index 6815b5b..666728c 100644 --- ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareLongDoubleColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareLongDoubleColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.*; import org.apache.hadoop.hive.ql.exec.vector.expressions.*; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; diff --git ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt index ec0a395..2e5b0d2 100644 
--- ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt +++ ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; diff --git ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt index f78de56..5636bb0 100644 --- ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt +++ ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression; @@ -59,7 +58,7 @@ public class extends VectorAggregateExpression { transient private boolean isNull = true; public Aggregation() { - value = new Timestamp(0); + value = new Timestamp(); } public void checkValue(TimestampColumnVector colVector, int index) { @@ -79,7 +78,7 @@ public class extends VectorAggregateExpression { @Override public void reset () { isNull = true; - this.value.setTime(0); + this.value.setTimeInMillis(0); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java index f02a300..79aaa65 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hive.ql.exec.vector; import java.sql.Date; -import java.sql.Timestamp; import java.util.List; import java.util.Map; @@ -38,6 +37,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.VectorPartitionConversion; import org.apache.hadoop.hive.serde2.io.ByteWritable; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java index 155c9b8..b147b6a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java @@ -18,9 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector; -import java.sql.Timestamp; - import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -77,7 +76,7 @@ public static void debugDisplayOneRow(VectorizedRowBatch batch, int index, Strin } else if (colVector instanceof DecimalColumnVector) { sb.append(((DecimalColumnVector) colVector).vector[index].toString()); } else if (colVector instanceof TimestampColumnVector) { - Timestamp timestamp = new 
Timestamp(0); + Timestamp timestamp = new Timestamp(); ((TimestampColumnVector) colVector).timestampUpdate(timestamp, index); sb.append(timestamp.toString()); } else if (colVector instanceof IntervalDayTimeColumnVector) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java index 96b8f78..3f75226 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.ql.exec.vector; -import java.sql.Timestamp; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -27,6 +26,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DateWritable; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java index 3e1fcdd..c343fd1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java @@ -20,11 +20,11 @@ import org.apache.hive.common.util.Murmur3; -import java.sql.Timestamp; import java.util.Arrays; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.KeyWrapper; import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -74,7 +74,7 @@ private HiveDecimalWritable[] decimalValues; private Timestamp[] timestampValues; - private static Timestamp ZERO_TIMESTAMP = new Timestamp(0); + private static Timestamp ZERO_TIMESTAMP = new Timestamp(); private HiveIntervalDayTime[] intervalDayTimeValues; private static HiveIntervalDayTime ZERO_INTERVALDAYTIME= new HiveIntervalDayTime(0, 0); @@ -111,7 +111,7 @@ private VectorHashKeyWrapper(HashContext ctx, int longValuesCount, int doubleVal byteLengths = EMPTY_INT_ARRAY; } for(int i = 0; i < timestampValuesCount; ++i) { - timestampValues[i] = new Timestamp(0); + timestampValues[i] = new Timestamp(); } for(int i = 0; i < intervalDayTimeValuesCount; ++i) { intervalDayTimeValues[i] = new HiveIntervalDayTime(); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java index 3fd2141..e61640c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java @@ -20,7 +20,6 @@ import java.lang.reflect.Constructor; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; @@ -41,6 +40,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import 
org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java index 4945d74..90c6e09 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java @@ -20,24 +20,17 @@ import java.io.IOException; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; -import java.util.LinkedList; import java.util.List; -import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.common.type.HiveChar; -import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; -import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -49,18 +42,14 @@ import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; -import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; -import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector; -import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; @@ -69,7 +58,6 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo; -import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; import org.apache.hadoop.io.BooleanWritable; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.DataOutputBuffer; @@ -78,7 +66,8 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; -import org.apache.hive.common.util.DateUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class VectorizedBatchUtil { 
private static final Logger LOG = LoggerFactory.getLogger(VectorizedBatchUtil.class); @@ -882,7 +871,7 @@ public static Writable getPrimitiveWritable(PrimitiveCategory primitiveCategory) case LONG: return new LongWritable(0); case TIMESTAMP: - return new TimestampWritable(new Timestamp(0)); + return new TimestampWritable(new Timestamp()); case DATE: return new DateWritable(new Date(0)); case FLOAT: @@ -959,7 +948,7 @@ public static StringBuilder debugFormatOneRow(VectorizedRowBatch batch, } else if (colVector instanceof DecimalColumnVector) { sb.append(((DecimalColumnVector) colVector).vector[index].toString()); } else if (colVector instanceof TimestampColumnVector) { - Timestamp timestamp = new Timestamp(0); + Timestamp timestamp = new Timestamp(); ((TimestampColumnVector) colVector).timestampUpdate(timestamp, index); sb.append(timestamp.toString()); } else if (colVector instanceof IntervalDayTimeColumnVector) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java index 9c35488..c278b20 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java @@ -19,42 +19,35 @@ import java.io.IOException; import java.sql.Date; -import java.sql.Timestamp; -import java.util.Arrays; import java.util.LinkedHashMap; -import java.util.List; import java.util.Map; -import org.apache.hadoop.hive.common.type.HiveChar; -import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.IOPrepareCache; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.VirtualColumn; -import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.MapWork; import org.apache.hadoop.hive.ql.plan.PartitionDesc; -import org.apache.hadoop.hive.ql.plan.Explain.Level; -import org.apache.hadoop.hive.ql.plan.Explain.Vectorization; -import org.apache.hadoop.hive.serde2.ColumnProjectionUtils; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.mapred.FileSplit; -import org.apache.hive.common.util.DateUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java 
ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java index 05b0e8a..9bedfdc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java @@ -41,7 +41,7 @@ public CastDateToTimestamp() { } private void setDays(TimestampColumnVector timestampColVector, long[] vector, int elementNum) { - timestampColVector.getScratchTimestamp().setTime(DateWritable.daysToMillis((int) vector[elementNum])); + timestampColVector.getScratchTimestamp().setTimeInMillis(DateWritable.daysToMillis((int) vector[elementNum])); timestampColVector.setFromScratchTimestamp(elementNum); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java index dfd9802..ae00fdf 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.util.TimestampUtils; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java index 9f71b9a..3255095 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java @@ -18,9 +18,10 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.*; -import org.apache.hadoop.hive.serde2.io.TimestampWritable; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; public class CastLongToTimestamp extends VectorExpression { private static final long serialVersionUID = 1L; @@ -39,7 +40,7 @@ public CastLongToTimestamp() { } private void setSeconds(TimestampColumnVector timestampColVector, long[] vector, int elementNum) { - timestampColVector.getScratchTimestamp().setTime(vector[elementNum] * 1000); + timestampColVector.getScratchTimestamp().setTimeInSeconds(vector[elementNum]); timestampColVector.setFromScratchTimestamp(elementNum); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java index 4cc120a..d28450d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java @@ -18,9 +18,10 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.*; -import 
org.apache.hadoop.hive.serde2.io.TimestampWritable; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; public class CastMillisecondsLongToTimestamp extends VectorExpression { private static final long serialVersionUID = 1L; @@ -40,7 +41,7 @@ public CastMillisecondsLongToTimestamp() { private void setMilliseconds(TimestampColumnVector timestampColVector, long[] vector, int elementNum) { - timestampColVector.getScratchTimestamp().setTime(vector[elementNum]); + timestampColVector.getScratchTimestamp().setTimeInMillis(vector[elementNum]); timestampColVector.setFromScratchTimestamp(elementNum); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java index 487c4b0..b6bff19 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java @@ -19,13 +19,22 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; import java.nio.charset.StandardCharsets; -import java.sql.Timestamp; -import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveChar; +import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveVarchar; -import org.apache.hadoop.hive.ql.exec.vector.*; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java index e04280f..8896f40 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java @@ -18,12 +18,12 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; -import org.apache.hadoop.hive.ql.exec.vector.*; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import 
org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.util.DateTimeMath; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -44,8 +44,8 @@ public DateColSubtractDateColumn(int colNum1, int colNum2, int outputColumn) { this.colNum1 = colNum1; this.colNum2 = colNum2; this.outputColumn = outputColumn; - scratchTimestamp1 = new Timestamp(0); - scratchTimestamp2 = new Timestamp(0); + scratchTimestamp1 = new Timestamp(); + scratchTimestamp2 = new Timestamp(); } public DateColSubtractDateColumn() { @@ -94,38 +94,38 @@ public void evaluate(VectorizedRowBatch batch) { * conditional checks in the inner loop. */ if (inputColVector1.isRepeating && inputColVector2.isRepeating) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0])); - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(0); } else if (inputColVector1.isRepeating) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } } } else if (inputColVector2.isRepeating) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } @@ -134,15 +134,15 @@ public void evaluate(VectorizedRowBatch batch) { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); 
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java index bce24ea..506b293 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java @@ -18,14 +18,11 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; -import org.apache.hadoop.hive.ql.exec.vector.*; import org.apache.hadoop.hive.ql.util.DateTimeMath; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -43,10 +40,10 @@ public DateColSubtractDateScalar(int colNum, long value, int outputColumn) { this.colNum = colNum; - this.value = new Timestamp(0); - this.value.setTime(DateWritable.daysToMillis((int) value)); + this.value = new Timestamp(); + this.value.setTimeInMillis(DateWritable.daysToMillis((int) value)); this.outputColumn = outputColumn; - scratchTimestamp1 = new Timestamp(0); + scratchTimestamp1 = new Timestamp(); } public DateColSubtractDateScalar() { @@ -79,7 +76,7 @@ public void evaluate(VectorizedRowBatch batch) { } if (inputColVector1.isRepeating) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(0); // Even if there are no nulls, we always copy over entry 0. Simplifies code. 
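The Date*SubtractDate* expressions above widen epoch-day values into scratch Timestamps before handing them to DateTimeMath. A minimal standalone sketch of that pattern follows; it uses only calls that appear in these hunks, except that the no-arg DateTimeMath constructor and the names DateSubtractSketch/subtractDays are assumptions for illustration, and it presumes DateTimeMath already accepts the common Timestamp type, as the unchanged dtm.subtract(...) calls imply.

import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
import org.apache.hadoop.hive.serde2.io.DateWritable;

public class DateSubtractSketch {
  // Subtract two DATE values given as days since the epoch, producing a day-time interval,
  // mirroring the repeating-column fast path of DateColSubtractDateColumn above.
  public static HiveIntervalDayTime subtractDays(int leftEpochDays, int rightEpochDays) {
    Timestamp left = new Timestamp();   // replaces new Timestamp(0)
    Timestamp right = new Timestamp();
    left.setTimeInMillis(DateWritable.daysToMillis(leftEpochDays));    // replaces setTime(...)
    right.setTimeInMillis(DateWritable.daysToMillis(rightEpochDays));
    HiveIntervalDayTime result = new HiveIntervalDayTime(0, 0);
    new DateTimeMath().subtract(left, right, result);  // no-arg constructor assumed, as the dtm field suggests
    return result;
  }
}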
@@ -88,13 +85,13 @@ public void evaluate(VectorizedRowBatch batch) { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } @@ -103,14 +100,14 @@ public void evaluate(VectorizedRowBatch batch) { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); outputIsNull[i] = inputIsNull[i]; } } else { for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java index 62f29f1..a7633b4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.*; -import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.util.DateTimeMath; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -40,10 +40,10 @@ public DateScalarSubtractDateColumn(long value, int colNum, int outputColumn) { this.colNum = colNum; - this.value = new Timestamp(0); - this.value.setTime(DateWritable.daysToMillis((int) value)); + this.value = new Timestamp(); + this.value.setTimeInMillis(DateWritable.daysToMillis((int) value)); this.outputColumn = outputColumn; - scratchTimestamp2 = new Timestamp(0); + scratchTimestamp2 = new Timestamp(); } public DateScalarSubtractDateColumn() { @@ -82,7 +82,7 @@ public void evaluate(VectorizedRowBatch batch) { } if (inputColVector2.isRepeating) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); dtm.subtract(value, scratchTimestamp2, 
outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(0); // Even if there are no nulls, we always copy over entry 0. Simplifies code. @@ -91,13 +91,13 @@ public void evaluate(VectorizedRowBatch batch) { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } @@ -106,14 +106,14 @@ public void evaluate(VectorizedRowBatch batch) { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); outputIsNull[i] = inputIsNull[i]; } } else { for(int i = 0; i != n; i++) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java index 1a34118..7d2be31 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java @@ -18,12 +18,20 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; -import org.apache.hadoop.hive.ql.exec.vector.*; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.plan.DynamicValue; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java 
ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java index a7666bc..d370e30 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java @@ -18,10 +18,10 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; import java.util.Arrays; import java.util.HashSet; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ITimestampInExpr.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ITimestampInExpr.java index f6cc971..85aa87d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ITimestampInExpr.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ITimestampInExpr.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; public interface ITimestampInExpr { void setInListValues(Timestamp[] inVals); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java index ae997e0..9083700 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; /** diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java index eb0c1c0..56d13b4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java @@ -18,14 +18,10 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; -import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; /** * Compute IF(expr1, expr2, expr3) for 3 input column expressions. 
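The IN-list expressions above (ITimestampInExpr, FilterTimestampColumnInList) now collect the common Timestamp into a HashSet, which only works if the type supplies value-based equals and hashCode; that property is assumed here rather than shown in these hunks, and the names TimestampInListSketch/isInList are illustrative only.

import java.util.HashSet;
import org.apache.hadoop.hive.common.type.Timestamp;

public class TimestampInListSketch {
  // Membership test in the style of setInListValues(Timestamp[]) followed by a per-row lookup.
  public static boolean isInList(Timestamp candidate, Timestamp[] inVals) {
    HashSet<Timestamp> inSet = new HashSet<>();
    for (Timestamp t : inVals) {
      inSet.add(t);                 // assumes value-based equals/hashCode on Timestamp
    }
    return inSet.contains(candidate);
  }
}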
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java index 3d53df1..6ba44c1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; /** diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java index 3e4a195..a6c845c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java @@ -18,13 +18,10 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; /** * Compute IF(expr1, expr2, expr3) for 3 input column expressions. diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java index cd00d3a..8d85f7c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java @@ -18,10 +18,9 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; -import java.sql.Timestamp; - /** * Compute IF(expr1, expr2, expr3) for 3 input expressions. * The first is always a boolean (LongColumnVector). 
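Several cast expressions in this patch replace setTime(...)/getTime() arithmetic with dedicated epoch accessors. The short sketch below assumes setTimeInSeconds/setTimeInMillis and getSeconds/getMillis are symmetric in the way the CastLongToTimestamp, CastMillisecondsLongToTimestamp and VectorUDFUnixTimeStampTimestamp hunks use them; the class name EpochConversionSketch is illustrative.

import org.apache.hadoop.hive.common.type.Timestamp;

public class EpochConversionSketch {
  public static void main(String[] args) {
    Timestamp fromSeconds = new Timestamp();
    fromSeconds.setTimeInSeconds(1_500_000_000L);    // was: setTime(seconds * 1000)
    Timestamp fromMillis = new Timestamp();
    fromMillis.setTimeInMillis(1_500_000_000_000L);  // was: setTime(millis)
    // Both should describe the same instant; getSeconds() replaces getTime() / 1000.
    System.out.println(fromSeconds.getSeconds() == fromMillis.getSeconds());
    System.out.println(fromMillis.getMillis());
  }
}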
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java index 5273131..a3568bf 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java @@ -19,11 +19,11 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import java.sql.Timestamp; import java.util.Arrays; /** diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampColumnInList.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampColumnInList.java index 5e76de8..6fa1399 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampColumnInList.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampColumnInList.java @@ -18,12 +18,12 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; import java.util.Arrays; import java.util.HashSet; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java index 85dacd7..82b339a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java @@ -18,10 +18,9 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java index d8df5cc..45119b0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -31,10 +30,23 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; -import 
org.apache.hadoop.hive.ql.exec.vector.*; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils; +import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; -import org.apache.hadoop.hive.serde2.io.*; +import org.apache.hadoop.hive.serde2.io.DateWritable; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; @@ -57,15 +69,14 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveVarcharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIntObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableLongObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableShortObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableStringObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.Text; -import org.apache.hive.common.util.DateUtils; /** * VectorExpressionWritableFactory helper class for generating VectorExpressionWritable objects. 
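Where a caller still needs java.sql.Timestamp (for example at a JDBC boundary), the conversion can be sketched with only the accessors this patch relies on: getMillis() and getNanos() on the common type, and Timestamp.ofEpochMilli(millis, nanos) as used in the NanoTimeUtils hunk further down. It is assumed here that ofEpochMilli replaces the sub-second part of the millis argument with the given nanos; behaviour for pre-epoch values is not verified, and the class name TimestampInterop is illustrative.

import org.apache.hadoop.hive.common.type.Timestamp;

public class TimestampInterop {
  // Common Timestamp -> java.sql.Timestamp, preserving nanosecond precision.
  public static java.sql.Timestamp toSql(Timestamp ts) {
    java.sql.Timestamp sql = new java.sql.Timestamp(ts.getMillis());
    sql.setNanos(ts.getNanos());
    return sql;
  }

  // java.sql.Timestamp -> common Timestamp, mirroring NanoTimeUtils.getTimestamp below.
  public static Timestamp fromSql(java.sql.Timestamp sql) {
    return Timestamp.ofEpochMilli(sql.getTime(), sql.getNanos());
  }
}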
@@ -865,7 +876,7 @@ public Object setValue(Object field, Timestamp value) { @Override public Object initValue(Object ignored) { - return ((SettableTimestampObjectInspector) this.objectInspector).create(new Timestamp(0)); + return ((SettableTimestampObjectInspector) this.objectInspector).create(new Timestamp()); } }.init(fieldObjInspector); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java index e9000c6..caae36c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java @@ -53,7 +53,7 @@ public VectorUDFTimestampFieldTimestamp() { } protected long getTimestampField(TimestampColumnVector timestampColVector, int elementNum) { - calendar.setTime(timestampColVector.asScratchTimestamp(elementNum)); + calendar.setTimeInMillis(timestampColVector.asScratchTimestamp(elementNum).getMillis()); return calendar.get(field); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampTimestamp.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampTimestamp.java index 2bd7756..5a7f192 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampTimestamp.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampTimestamp.java @@ -30,7 +30,7 @@ @Override protected long getTimestampField(TimestampColumnVector timestampColVector, int elementNum) { - return timestampColVector.asScratchTimestamp(elementNum).getTime() / 1000; + return timestampColVector.asScratchTimestamp(elementNum).getSeconds(); } public VectorUDFUnixTimeStampTimestamp(int colNum, int outputColumn) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFOperator.java index 0d72ba8..0293277 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFOperator.java @@ -18,23 +18,19 @@ package org.apache.hadoop.hive.ql.exec.vector.ptf; -import java.io.IOException; -import java.sql.Timestamp; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; -import java.util.Properties; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.commons.lang.ArrayUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector; @@ -44,25 +40,19 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizationContextRegion; import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil; import 
org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type; -import org.apache.hadoop.hive.ql.exec.vector.expressions.IdentityExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.BaseWork; -import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.PTFDesc; import org.apache.hadoop.hive.ql.plan.VectorPTFDesc; -import org.apache.hadoop.hive.ql.plan.VectorPTFDesc.SupportedFunctionType; +import org.apache.hadoop.hive.ql.plan.VectorPTFInfo; import org.apache.hadoop.hive.ql.plan.api.OperatorType; import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef; -import org.apache.hadoop.hive.ql.plan.VectorPTFInfo; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; -import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; @@ -526,7 +516,7 @@ private void setCurrentPartition(VectorizedRowBatch batch) { break; case TIMESTAMP: if (currentPartitionTimestamps[i] == null) { - currentPartitionTimestamps[i] = new Timestamp(0); + currentPartitionTimestamps[i] = new Timestamp(); } ((TimestampColumnVector) colVector).timestampUpdate(currentPartitionTimestamps[i], 0); break; diff --git ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java index dcefada..fbd7ac3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java @@ -19,12 +19,10 @@ package org.apache.hadoop.hive.ql.io.orc; import java.io.IOException; -import java.sql.Timestamp; import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; @@ -41,6 +39,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector; @@ -61,9 +60,6 @@ import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; -import com.google.common.annotations.VisibleForTesting; -import org.apache.orc.PhysicalWriter; - /** * An ORC file writer. The file is divided into stripes, which is the natural * unit of work when reading. 
Each stripe is buffered in memory until the @@ -195,9 +191,8 @@ static void setColumn(int rowId, ColumnVector column, } case TIMESTAMP: { TimestampColumnVector vector = (TimestampColumnVector) column; - Timestamp ts = ((TimestampObjectInspector) inspector) - .getPrimitiveJavaObject(obj); - vector.set(rowId, ts); + vector.set(rowId, ((TimestampObjectInspector) inspector) + .getPrimitiveJavaObject(obj)); break; } case DATE: { diff --git ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java index 76d93b8..6252ccc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java @@ -14,10 +14,10 @@ package org.apache.hadoop.hive.ql.io.parquet.convert; import java.math.BigDecimal; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Map; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime; import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils; diff --git ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java index 3fd75d2..b6f204c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java @@ -13,12 +13,13 @@ */ package org.apache.hadoop.hive.ql.io.parquet.timestamp; -import java.sql.Timestamp; import java.util.Calendar; import java.util.GregorianCalendar; import java.util.TimeZone; import java.util.concurrent.TimeUnit; +import org.apache.hadoop.hive.common.type.Timestamp; + import jodd.datetime.JDateTime; /** @@ -58,7 +59,7 @@ public static Calendar getCalendar(boolean skipConversion) { public static NanoTime getNanoTime(Timestamp ts, boolean skipConversion) { Calendar calendar = getCalendar(skipConversion); - calendar.setTime(ts); + calendar.setTimeInMillis(ts.getMillis()); int year = calendar.get(Calendar.YEAR); if (calendar.get(Calendar.ERA) == GregorianCalendar.BC) { year = 1 - year; @@ -106,8 +107,7 @@ public static Timestamp getTimestamp(NanoTime nt, boolean skipConversion) { calendar.set(Calendar.HOUR_OF_DAY, hour); calendar.set(Calendar.MINUTE, minutes); calendar.set(Calendar.SECOND, seconds); - Timestamp ts = new Timestamp(calendar.getTimeInMillis()); - ts.setNanos((int) nanos); + Timestamp ts = Timestamp.ofEpochMilli(calendar.getTimeInMillis(), (int) nanos); return ts; } } diff --git ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java index e9543c6..603cb8c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java @@ -13,6 +13,7 @@ */ package org.apache.hadoop.hive.ql.io.parquet.vector; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; @@ -44,7 +45,6 @@ import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; -import java.sql.Timestamp; import 
static org.apache.parquet.column.ValuesType.DEFINITION_LEVEL; import static org.apache.parquet.column.ValuesType.REPETITION_LEVEL; diff --git ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java index 6b7b50a..a9e88a4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java @@ -16,6 +16,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe; import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -48,7 +49,6 @@ import org.apache.parquet.schema.Type; import java.sql.Date; -import java.sql.Timestamp; import java.util.List; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java index 3b42741..b2d21c8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java @@ -19,7 +19,6 @@ import java.math.BigDecimal; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; @@ -48,6 +47,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.Hive; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java index 9d9bf89..59b8a24 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java @@ -19,14 +19,13 @@ import java.math.BigDecimal; import java.math.BigInteger; -import java.sql.Timestamp; import java.time.Instant; +import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.LinkedHashMap; import java.util.List; -import java.util.Locale; import java.util.Map; import org.apache.calcite.avatica.util.TimeUnit; @@ -59,6 +58,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; @@ -383,8 +383,6 @@ private RexNode handleExplicitCast(ExprNodeGenericFuncDesc func, List c GenericUDF udf = func.getGenericUDF(); if ((udf instanceof GenericUDFToChar) || (udf instanceof GenericUDFToVarchar) || (udf instanceof GenericUDFToDecimal) || (udf instanceof GenericUDFToDate) - // Calcite can not specify the 
scale for timestamp. As a result, all - // the millisecond part will be lost || (udf instanceof GenericUDFTimestamp) || (udf instanceof GenericUDFToTimestampLocalTZ) || (udf instanceof GenericUDFToBinary) || castExprUsingUDFBridge(udf)) { castExpr = cluster.getRexBuilder().makeAbstractCast( @@ -666,9 +664,10 @@ protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticEx calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value)); break; case DATE: - final Calendar cal = Calendar.getInstance(Locale.getDefault()); - cal.setTime((Date) value); - calciteLiteral = rexBuilder.makeDateLiteral(DateString.fromCalendarFields(cal)); + final Date date = (Date) value; + calciteLiteral = rexBuilder.makeDateLiteral( + DateString.fromDaysSinceEpoch( + (int) ChronoUnit.DAYS.between(Instant.EPOCH, date.toInstant()))); break; case TIMESTAMP: final TimestampString tsString; @@ -676,9 +675,7 @@ protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticEx tsString = TimestampString.fromCalendarFields((Calendar) value); } else { final Timestamp ts = (Timestamp) value; - final Calendar calt = Calendar.getInstance(Locale.getDefault()); - calt.setTimeInMillis(ts.getTime()); - tsString = TimestampString.fromCalendarFields(calt).withNanos(ts.getNanos()); + tsString = TimestampString.fromMillisSinceEpoch(ts.getMillis()).withNanos(ts.getNanos()); } // Must call makeLiteral, not makeTimestampLiteral // to have the RexBuilder.roundTime logic kick in diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java index 4ddf1d5..f10e275 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java @@ -20,8 +20,6 @@ import java.math.BigDecimal; import java.sql.Date; -import java.sql.Timestamp; -import java.time.ZoneId; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -38,6 +36,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZUtil; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -49,6 +48,7 @@ import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; import org.apache.hadoop.hive.ql.lib.Dispatcher; +import org.apache.hadoop.hive.ql.lib.ExpressionWalker; import org.apache.hadoop.hive.ql.lib.GraphWalker; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessor; @@ -57,7 +57,6 @@ import org.apache.hadoop.hive.ql.lib.RuleRegExp; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.lib.ExpressionWalker; import org.apache.hadoop.hive.ql.optimizer.ConstantPropagateProcFactory; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSubquerySemanticException; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java index f623636..f785b2e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java +++ 
ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java @@ -25,6 +25,7 @@ import java.util.Map; import java.util.NoSuchElementException; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable; @@ -59,13 +60,13 @@ public TimestampWritable evaluate(TimestampWritable t) { if (t == null) { return null; } - final long originalTimestamp = t.getTimestamp().getTime(); // default + final long originalTimestamp = t.getTimestamp().getMillis(); // default final long originalTimestampUTC = new DateTime(originalTimestamp) .withZoneRetainFields(DateTimeZone.UTC).getMillis(); // default -> utc final long newTimestampUTC = granularity.truncate(originalTimestampUTC); // utc final long newTimestamp = new DateTime(newTimestampUTC, DateTimeZone.UTC) .withZoneRetainFields(DateTimeZone.getDefault()).getMillis(); // utc -> default - resultTS.setTime(newTimestamp); + resultTS.set(Timestamp.ofEpochMilli(newTimestamp)); return resultTS; } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java index a578b0d..feb41a5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java @@ -85,23 +85,22 @@ public IntWritable evaluate(Text dateString) { } } - public IntWritable evaluate(DateWritable d) { - if (d == null) { - return null; - } - - calendar.setTime(d.get(false)); // Time doesn't matter. - result.set(calendar.get(Calendar.DAY_OF_MONTH)); - return result; - } +// public IntWritable evaluate(DateWritable d) { +// if (d == null) { +// return null; +// } +// +// calendar.setTime(d.get(false)); // Time doesn't matter. +// result.set(calendar.get(Calendar.DAY_OF_MONTH)); +// return result; +// } public IntWritable evaluate(TimestampWritable t) { if (t == null) { return null; } - calendar.setTime(t.getTimestamp()); - result.set(calendar.get(Calendar.DAY_OF_MONTH)); + result.set(t.getTimestamp().getLocalDateTime().getDayOfMonth()); return result; } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java index 8c3e461..5ac5c5f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java @@ -82,23 +82,22 @@ public IntWritable evaluate(Text dateString) { } } - public IntWritable evaluate(DateWritable d) { - if (d == null) { - return null; - } - - calendar.setTime(d.get(false)); // Time doesn't matter. - result.set(calendar.get(Calendar.DAY_OF_WEEK)); - return result; - } +// public IntWritable evaluate(DateWritable d) { +// if (d == null) { +// return null; +// } +// +// calendar.setTime(d.get(false)); // Time doesn't matter. 
+// result.set(calendar.get(Calendar.DAY_OF_WEEK)); +// return result; +// } public IntWritable evaluate(TimestampWritable t) { if (t == null) { return null; } - calendar.setTime(t.getTimestamp()); - result.set(calendar.get(Calendar.DAY_OF_WEEK)); + result.set(t.getTimestamp().getLocalDateTime().getDayOfWeek().plus(1).getValue()); return result; } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java index bccf5a6..f30a8f2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java @@ -96,8 +96,7 @@ public IntWritable evaluate(TimestampWritable t) { return null; } - calendar.setTime(t.getTimestamp()); - result.set(calendar.get(Calendar.HOUR_OF_DAY)); + result.set(t.getTimestamp().getLocalDateTime().getHour()); return result; } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java index 2896385..6017dbe 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java @@ -96,8 +96,7 @@ public IntWritable evaluate(TimestampWritable t) { return null; } - calendar.setTime(t.getTimestamp()); - result.set(calendar.get(Calendar.MINUTE)); + result.set(t.getTimestamp().getLocalDateTime().getMinute()); return result; } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java index 759d2b0..621acf0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java @@ -83,23 +83,22 @@ public IntWritable evaluate(Text dateString) { } } - public IntWritable evaluate(DateWritable d) { - if (d == null) { - return null; - } - - calendar.setTime(d.get(false)); // Time doesn't matter. - result.set(1 + calendar.get(Calendar.MONTH)); - return result; - } +// public IntWritable evaluate(DateWritable d) { +// if (d == null) { +// return null; +// } +// +// calendar.setTime(d.get(false)); // Time doesn't matter. 
+// result.set(1 + calendar.get(Calendar.MONTH)); +// return result; +// } public IntWritable evaluate(TimestampWritable t) { if (t == null) { return null; } - calendar.setTime(t.getTimestamp()); - result.set(1 + calendar.get(Calendar.MONTH)); + result.set(t.getTimestamp().getLocalDateTime().getMonthValue()); return result; } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java index f5ce2b8..bed666d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java @@ -100,8 +100,7 @@ public IntWritable evaluate(TimestampWritable t) { return null; } - calendar.setTime(t.getTimestamp()); - result.set(calendar.get(Calendar.SECOND)); + result.set(t.getTimestamp().getLocalDateTime().getSecond()); return result; } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java index 7deb61b..6a613ae 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java @@ -20,8 +20,10 @@ import java.text.ParseException; import java.text.SimpleDateFormat; +import java.time.temporal.IsoFields; import java.util.Calendar; import java.util.Date; +import java.util.Locale; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; @@ -82,23 +84,22 @@ public IntWritable evaluate(Text dateString) { } } - public IntWritable evaluate(DateWritable d) { - if (d == null) { - return null; - } - - calendar.setTime(d.get(false)); // Time doesn't matter. - result.set(calendar.get(Calendar.WEEK_OF_YEAR)); - return result; - } +// public IntWritable evaluate(DateWritable d) { +// if (d == null) { +// return null; +// } +// +// calendar.setTime(d.get(false)); // Time doesn't matter. +// result.set(calendar.get(Calendar.WEEK_OF_YEAR)); +// return result; +// } public IntWritable evaluate(TimestampWritable t) { if (t == null) { return null; } - calendar.setTime(t.getTimestamp()); - result.set(calendar.get(Calendar.WEEK_OF_YEAR)); + result.set(t.getTimestamp().getLocalDateTime().get(IsoFields.WEEK_OF_WEEK_BASED_YEAR)); return result; } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java index d1af121..835698a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java @@ -85,23 +85,22 @@ public IntWritable evaluate(Text dateString) { } } - public IntWritable evaluate(DateWritable d) { - if (d == null) { - return null; - } - - calendar.setTime(d.get(false)); // Time doesn't matter. - result.set(calendar.get(Calendar.YEAR)); - return result; - } +// public IntWritable evaluate(DateWritable d) { +// if (d == null) { +// return null; +// } +// +// calendar.setTime(d.get(false)); // Time doesn't matter. 
+// result.set(calendar.get(Calendar.YEAR)); +// return result; +// } public IntWritable evaluate(TimestampWritable t) { if (t == null) { return null; } - calendar.setTime(t.getTimestamp()); - result.set(calendar.get(Calendar.YEAR)); + result.set(t.getTimestamp().getLocalDateTime().getYear()); return result; } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java index 3d85cc4..6e51d5b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hive.common.io.NonSyncByteArrayInputStream; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.SelectOperator; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -43,7 +44,6 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.sql.Timestamp; import java.util.List; /** @@ -188,7 +188,7 @@ public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveExcep case TIMESTAMP: Timestamp vTimeStamp = ((TimestampObjectInspector)inputOI). getPrimitiveJavaObject(parameters[0]); - bf.addLong(vTimeStamp.getTime()); + bf.addLong(vTimeStamp.getMillis()); break; case CHAR: Text vChar = ((HiveCharObjectInspector)inputOI). diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java index ef8dcf0..597788c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java @@ -20,12 +20,12 @@ import java.io.Closeable; import java.io.IOException; -import java.sql.Timestamp; import java.text.ParseException; import java.util.Date; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.MapredContext; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; @@ -45,7 +45,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java index 2f13a22..cac7278 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; +import 
org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -48,7 +49,9 @@ public ObjectInspector initialize(ObjectInspector[] arguments) } if (currentTimestamp == null) { - currentTimestamp = new TimestampWritable(SessionState.get().getQueryCurrentTimestamp()); + java.sql.Timestamp ts = SessionState.get().getQueryCurrentTimestamp(); + currentTimestamp = new TimestampWritable( + Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos())); } return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java index 3885abc..e7fcfa4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -119,7 +119,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { case TIMESTAMP: Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get())) .getTimestamp(); - output.set(DateWritable.millisToDays(ts.getTime())); + output.set(DateWritable.millisToDays(ts.getMillis())); break; case TIMESTAMPLOCALTZ: case DATE: diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java index 76337a6..8c07f4c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -176,7 +176,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { case TIMESTAMP: Timestamp ts = ((TimestampWritable) dateConverter.convert(arguments[0].get())) .getTimestamp(); - output.set(DateWritable.millisToDays(ts.getTime())); + output.set(DateWritable.millisToDays(ts.getMillis())); break; case DATE: DateWritable dw = (DateWritable) dateConverter.convert(arguments[0].get()); diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java index ac48f01..b4fb855 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java @@ -17,12 +17,11 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Timestamp; +import java.sql.Date; import java.text.ParseException; import java.text.SimpleDateFormat; -import java.sql.Date; -import java.util.TimeZone; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import 
org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -39,8 +38,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.IntWritable; /** @@ -120,7 +119,7 @@ private Date convertToDate(PrimitiveCategory inputType, Converter converter, Def case TIMESTAMP: Timestamp ts = ((TimestampWritable) converter.convert(argument.get())) .getTimestamp(); - date.setTime(ts.getTime()); + date.setTime(ts.getMillis()); break; case DATE: DateWritable dw = (DateWritable) converter.convert(argument.get()); diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java index b709e95..0733a8d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java @@ -23,6 +23,7 @@ import java.text.SimpleDateFormat; import java.util.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -97,13 +98,16 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { } // the function should support both short date and full timestamp format // time part of the timestamp should not be skipped - Date date = getTimestampValue(arguments, 0, tsConverters); - if (date == null) { + Timestamp ts = getTimestampValue(arguments, 0, tsConverters); + Date date; + if (ts == null) { date = getDateValue(arguments, 0, dtInputTypes, dtConverters); if (date == null) { return null; } - } + } else { + date = new Date(ts.getMillis()); + } String res = formatter.format(date); if (res == null) { diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java index 33fe507..79ecfeb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java @@ -17,13 +17,11 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Timestamp; -import java.text.ParseException; -import java.text.SimpleDateFormat; import java.util.TimeZone; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.apache.hadoop.hive.common.type.TimestampTZ; +import org.apache.hadoop.hive.common.type.TimestampTZUtil; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -34,6 +32,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TextConverter; import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Description(name = "from_utc_timestamp", value = "from_utc_timestamp(timestamp, string timezone) - " @@ -45,7 +45,6 @@ private transient PrimitiveObjectInspector[] argumentOIs; private transient TimestampConverter timestampConverter; private transient TextConverter textConverter; - private transient SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); private transient TimeZone tzUTC = TimeZone.getTimeZone("UTC"); @Override @@ -70,26 +69,6 @@ public ObjectInspector initialize(ObjectInspector[] arguments) return PrimitiveObjectInspectorFactory.javaTimestampObjectInspector; } - /** - * Parse the timestamp string using the input TimeZone. - * This does not parse fractional seconds. - * @param tsString - * @param tz - * @return - */ - protected Timestamp timestampFromString(String tsString, TimeZone tz) { - dateFormat.setTimeZone(tz); - try { - java.util.Date date = dateFormat.parse(tsString); - if (date == null) { - return null; - } - return new Timestamp(date.getTime()); - } catch (ParseException err) { - return null; - } - } - @Override public Object evaluate(DeferredObject[] arguments) throws HiveException { Object o0 = arguments[0].get(); @@ -123,21 +102,15 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { // inputTs is the year/month/day/hour/minute/second in the local timezone. // For this UDF we want it in the timezone represented by fromTz - Timestamp fromTs = timestampFromString(inputTs.toString(), fromTz); + TimestampTZ fromTs = TimestampTZUtil.parse(inputTs.toString(), fromTz.toZoneId()); if (fromTs == null) { return null; } // Now output this timestamp's millis value to the equivalent toTz. - dateFormat.setTimeZone(toTz); - Timestamp result = Timestamp.valueOf(dateFormat.format(fromTs)); - - if (inputTs.getNanos() != 0) { - result.setNanos(inputTs.getNanos()); - } - + Timestamp result = new Timestamp( + fromTs.getZonedDateTime().withZoneSameInstant(toTz.toZoneId()).toLocalDateTime()); return result; - } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java index 786db83..057dd96 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hive.common.io.NonSyncByteArrayInputStream; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -40,7 +41,6 @@ import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; -import java.sql.Timestamp; /** * GenericUDF to lookup a value in BloomFilter @@ -153,7 +153,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { case TIMESTAMP: Timestamp vTimeStamp = ((TimestampObjectInspector) valObjectInspector). 
getPrimitiveJavaObject(arguments[0].get()); - return bloomFilter.testLong(vTimeStamp.getTime()); + return bloomFilter.testLong(vTimeStamp.getMillis()); case CHAR: Text vChar = ((HiveCharObjectInspector) valObjectInspector). getPrimitiveWritableObject(arguments[0].get()).getStrippedValue(); diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java index e50b4f1..b6b6d93 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java @@ -31,6 +31,7 @@ import java.util.Calendar; import java.util.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -103,21 +104,27 @@ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumen public Object evaluate(DeferredObject[] arguments) throws HiveException { // the function should support both short date and full timestamp format // time part of the timestamp should not be skipped - Date date1 = getTimestampValue(arguments, 0, tsConverters); - if (date1 == null) { + Timestamp ts1 = getTimestampValue(arguments, 0, tsConverters); + Date date1; + if (ts1 == null) { date1 = getDateValue(arguments, 0, dtInputTypes, dtConverters); if (date1 == null) { return null; } - } + } else { + date1 = new Date(ts1.getMillis()); + } - Date date2 = getTimestampValue(arguments, 1, tsConverters); - if (date2 == null) { + Timestamp ts2 = getTimestampValue(arguments, 1, tsConverters); + Date date2; + if (ts2 == null) { date2 = getDateValue(arguments, 1, dtInputTypes, dtConverters); if (date2 == null) { return null; } - } + } else { + date2 = new Date(ts2.getMillis()); + } cal1.setTime(date1); cal2.setTime(date2); diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java index 116b3c4..b6e6718 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java @@ -19,12 +19,12 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java index 6417f02..b2f1678 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java @@ -19,12 +19,12 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import 
org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java index 05e2163..e6b920c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java @@ -20,9 +20,9 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; -import java.sql.Timestamp; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java index 819de77..949f72a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Timestamp; import java.text.ParseException; import java.text.SimpleDateFormat; import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -39,11 +39,9 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector; import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.Text; /** * deterministic version of UDFUnixTimeStamp. 
enforces argument @@ -158,7 +156,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { } protected static void setValueFromTs(LongWritable value, Timestamp timestamp) { - value.set(timestamp.getTime() / 1000); + value.set(timestamp.getSeconds()); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java index d8368eb..f4da236 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java @@ -20,13 +20,13 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.math.BigDecimal; -import java.sql.Timestamp; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -310,7 +310,7 @@ private Object evaluateDate(DeferredObject[] arguments) throws UDFArgumentLength case TIMESTAMP: Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get())).getTimestamp(); - date = ts; + date = new Date(ts.getMillis()); break; case DATE: DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get()); diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java index aaa1bd4..28e3e2b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java @@ -20,6 +20,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -44,7 +45,7 @@ protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentEx } else { if (currentTimestamp == null) { currentTimestamp = new LongWritable(0); - setValueFromTs(currentTimestamp, SessionState.get().getQueryCurrentTimestamp()); + setValueFromTs(currentTimestamp, Timestamp.ofEpochMilli(SessionState.get().getQueryCurrentTimestamp().getTime())); String msg = "unix_timestamp(void) is deprecated. 
Use current_timestamp instead."; SessionState.getConsole().printInfo(msg, false); } diff --git ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java index 98b1ded..5953685 100644 --- ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java +++ ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java @@ -18,12 +18,15 @@ package org.apache.hadoop.hive.ql.util; import java.sql.Date; -import java.sql.Timestamp; +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneOffset; import java.util.Calendar; import java.util.TimeZone; import java.util.concurrent.TimeUnit; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hive.common.util.DateUtils; @@ -102,7 +105,7 @@ public Timestamp add(Timestamp ts, HiveIntervalYearMonth interval) { return null; } - Timestamp tsResult = new Timestamp(0); + Timestamp tsResult = new Timestamp(); add(ts, interval, tsResult); return tsResult; @@ -115,9 +118,8 @@ public boolean add(Timestamp ts, HiveIntervalYearMonth interval, Timestamp resul // Attempt to match Oracle semantics for timestamp arithmetic, // where timestamp arithmetic is done in UTC, then converted back to local timezone - long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths()); - result.setTime(resultMillis); - result.setNanos(ts.getNanos()); + long resultMillis = addMonthsToMillisUtc(ts.getMillis(), interval.getTotalMonths()); + result.setTimeInMillis(resultMillis, ts.getNanos()); return true; } @@ -127,7 +129,7 @@ public Timestamp add(HiveIntervalYearMonth interval, Timestamp ts) { return null; } - Timestamp tsResult = new Timestamp(0); + Timestamp tsResult = new Timestamp(); add(interval, ts, tsResult); return tsResult; @@ -140,9 +142,8 @@ public boolean add(HiveIntervalYearMonth interval, Timestamp ts, Timestamp resul // Attempt to match Oracle semantics for timestamp arithmetic, // where timestamp arithmetic is done in UTC, then converted back to local timezone - long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths()); - result.setTime(resultMillis); - result.setNanos(ts.getNanos()); + long resultMillis = addMonthsToMillisUtc(ts.getMillis(), interval.getTotalMonths()); + result.setTimeInMillis(resultMillis, ts.getNanos()); return true; } @@ -208,7 +209,7 @@ public Timestamp subtract(Timestamp left, HiveIntervalYearMonth right) { return null; } - Timestamp tsResult = new Timestamp(0); + Timestamp tsResult = new Timestamp(); subtract(left, right, tsResult); return tsResult; @@ -255,7 +256,7 @@ public Timestamp add(Timestamp ts, HiveIntervalDayTime interval) { return null; } - Timestamp tsResult = new Timestamp(0); + Timestamp tsResult = new Timestamp(); add(ts, interval, tsResult); return tsResult; @@ -269,10 +270,9 @@ public boolean add(Timestamp ts, HiveIntervalDayTime interval, nanosResult.addNanos(ts.getNanos(), interval.getNanos()); - long newMillis = ts.getTime() + long newMillis = ts.getMillis() + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds); - result.setTime(newMillis); - result.setNanos(nanosResult.nanos); + result.setTimeInMillis(newMillis, nanosResult.nanos); return true; } @@ -281,7 +281,7 @@ public Timestamp add(HiveIntervalDayTime interval, Timestamp ts) { return null; } - Timestamp tsResult = new 
Timestamp(0); + Timestamp tsResult = new Timestamp(); add(interval, ts, tsResult); return tsResult; } @@ -294,10 +294,9 @@ public boolean add(HiveIntervalDayTime interval, Timestamp ts, nanosResult.addNanos(ts.getNanos(), interval.getNanos()); - long newMillis = ts.getTime() + long newMillis = ts.getMillis() + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds); - result.setTime(newMillis); - result.setNanos(nanosResult.nanos); + result.setTimeInMillis(newMillis, nanosResult.nanos); return true; } @@ -373,8 +372,8 @@ public boolean subtract(Timestamp left, Timestamp right, nanosResult.addNanos(left.getNanos(), -(right.getNanos())); - long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.getTime()) - - TimeUnit.MILLISECONDS.toSeconds(right.getTime()) + nanosResult.seconds; + long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.getMillis()) + - TimeUnit.MILLISECONDS.toSeconds(right.getMillis()) + nanosResult.seconds; result.set(totalSeconds, nanosResult.nanos); return true; } diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java index 76ab315..f180893 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java @@ -19,12 +19,12 @@ import static org.junit.Assert.assertEquals; import java.io.IOException; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDeException; @@ -55,7 +55,7 @@ public void testSpillTimestamp() throws HiveException, SerDeException, IOExcepti ObjectInspectorUtils.getStandardObjectInspector(serde.getObjectInspector())); result.setTableDesc( PTFRowContainer.createTableDesc((StructObjectInspector) serde.getObjectInspector())); - TimestampWritable key = new TimestampWritable(new Timestamp(10)); + TimestampWritable key = new TimestampWritable(Timestamp.ofEpochMilli(10)); result.setKeyObject(Lists.newArrayList(key)); List row; // will trigger 2 spills diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java index 2fa9ab2..44504fc 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java @@ -20,10 +20,10 @@ import org.junit.Test; -import java.sql.Timestamp; import java.util.Random; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.util.TimestampUtils; import static org.junit.Assert.*; diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java index ec392c2..5795c7f 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hive.ql.exec.vector; import java.sql.Date; -import java.sql.Timestamp; import 
java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -31,15 +30,16 @@ import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; -import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StandardMapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; @@ -74,12 +74,12 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; -import org.apache.hive.common.util.DateUtils; -import org.apache.hadoop.io.Text; import org.apache.hadoop.io.BytesWritable; +import org.apache.hadoop.io.Text; +import org.apache.hive.common.util.DateUtils; -import com.google.common.base.Preconditions; import com.google.common.base.Charsets; +import com.google.common.base.Preconditions; /** * Generate object inspector and random row object[]. 
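Reviewer note: several of the hunks above (GenericUDFCurrentTimestamp, GenericUDFUnixTimeStamp, DateTimeMath) have to bridge between java.sql.Timestamp, which still arrives from SessionState and other legacy call sites, and the new org.apache.hadoop.hive.common.type.Timestamp. A minimal sketch of that bridging pattern follows; it assumes only the methods that appear in this patch (ofEpochMilli, getMillis, getNanos), and the TimestampBridge class name is illustrative, not part of the change.

import org.apache.hadoop.hive.common.type.Timestamp;

public final class TimestampBridge {
  private TimestampBridge() {
  }

  // java.sql.Timestamp -> Hive Timestamp, carrying the nanos along
  // (the same construction GenericUDFCurrentTimestamp uses in the hunk above).
  public static Timestamp fromSql(java.sql.Timestamp legacy) {
    return Timestamp.ofEpochMilli(legacy.getTime(), legacy.getNanos());
  }

  // Hive Timestamp -> java.sql.Timestamp, for call sites that still expect the JDBC type.
  public static java.sql.Timestamp toSql(Timestamp ts) {
    java.sql.Timestamp legacy = new java.sql.Timestamp(ts.getMillis());
    legacy.setNanos(ts.getNanos());
    return legacy;
  }
}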
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java index b091026..9af5ce5 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java @@ -24,6 +24,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.fast.DeserializeRead; import org.apache.hadoop.hive.serde2.fast.SerializeWrite; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -57,7 +58,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java index eabe54e..61735a6 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java @@ -18,8 +18,17 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.junit.Assert; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.List; +import java.util.Random; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + import org.apache.commons.lang.ArrayUtils; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch; @@ -33,30 +42,19 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import org.junit.internal.runners.statements.Fail; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.Calendar; -import java.util.List; -import java.util.Random; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.ThreadFactory; - public class TestVectorDateExpressions { private ExecutorService runner; /* copied over from VectorUDFTimestampFieldLong */ private TimestampWritable toTimestampWritable(long daysSinceEpoch) { - Timestamp ts = new Timestamp(DateWritable.daysToMillis((int) daysSinceEpoch)); + Timestamp ts = Timestamp.ofEpochMilli(DateWritable.daysToMillis((int) daysSinceEpoch)); return new TimestampWritable(ts); } @@ -306,7 +304,7 @@ private void compareToUDFUnixTimeStampDate(long t, long y) { LongWritable res = getLongWritable(tsw); if(res.get() != y) { System.out.printf("%d vs %d for %d, %d\n", res.get(), y, t, - tsw.getTimestamp().getTime()/1000); + tsw.getTimestamp().getSeconds()); } Assert.assertEquals(res.get(), y); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java 
ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java index 02602f4..15ae96f 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Random; -import junit.framework.Assert; - import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; @@ -58,6 +56,8 @@ import org.apache.hadoop.io.Writable; import org.junit.Test; +import junit.framework.Assert; + /** * Unit tests for vector expression writers. */ @@ -115,7 +115,7 @@ private Writable getWritableValue(TypeInfo ti, long value) { } else if (ti.equals(TypeInfoFactory.booleanTypeInfo)) { return new BooleanWritable( value == 0 ? false : true); } else if (ti.equals(TypeInfoFactory.timestampTypeInfo)) { - Timestamp ts = new Timestamp(value); + Timestamp ts = Timestamp.ofEpochMilli(value); TimestampWritable tw = new TimestampWritable(ts); return tw; } diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java index b4682f9..80c673e 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java @@ -22,9 +22,8 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import java.sql.Timestamp; - import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; @@ -605,7 +604,7 @@ public void testFilterTimestampBetween() { vrb.cols[0] = new TimestampColumnVector(); TimestampColumnVector lcv0 = (TimestampColumnVector) vrb.cols[0]; - Timestamp startTS = new Timestamp(0); // the epoch + Timestamp startTS = new Timestamp(); // the epoch Timestamp endTS = Timestamp.valueOf("2013-11-05 00:00:00.000000000"); Timestamp ts0 = Timestamp.valueOf("1963-11-06 00:00:00.000"); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java index e25dcdf..138c561 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -31,7 +32,6 @@ import java.nio.charset.Charset; import 
java.nio.charset.StandardCharsets; import java.sql.Date; -import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.List; @@ -71,7 +71,7 @@ private TimestampColumnVector toTimestamp(LongColumnVector date) { } private Timestamp toTimestamp(long date) { - return new Timestamp(DateWritable.daysToMillis((int) date)); + return Timestamp.ofEpochMilli(DateWritable.daysToMillis((int) date)); } private BytesColumnVector toString(LongColumnVector date) { @@ -444,7 +444,7 @@ public void testDateDiffScalarCol() { byte[] bytes = "error".getBytes(utf8); VectorizedRowBatch batch = new VectorizedRowBatch(2, 1); - udf = new VectorUDFDateDiffScalarCol(new Timestamp(0), 0, 1); + udf = new VectorUDFDateDiffScalarCol(new Timestamp(), 0, 1); udf.setInputTypes(VectorExpression.Type.TIMESTAMP, VectorExpression.Type.STRING); batch.cols[0] = new BytesColumnVector(1); batch.cols[1] = new LongColumnVector(1); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java index 41f2621..04faf77 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; import java.io.UnsupportedEncodingException; -import java.sql.Timestamp; import java.util.Arrays; import java.util.Random; -import junit.framework.Assert; - import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -57,6 +55,8 @@ import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.junit.Test; +import junit.framework.Assert; + public class TestVectorMathFunctions { diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java index d4f1f6f..67943c2 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java @@ -20,19 +20,15 @@ import java.nio.ByteBuffer; import java.nio.charset.CharacterCodingException; -import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Date; -import java.util.List; import java.util.Random; -import junit.framework.Assert; - -import org.apache.commons.lang.ArrayUtils; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -48,10 +44,11 @@ import org.apache.hadoop.hive.ql.udf.UDFYear; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.junit.Test; +import junit.framework.Assert; + /** * Unit tests for timestamp expressions. 
*/ @@ -72,11 +69,11 @@ long before = exactly - 1000; long after = exactly + 1000; if (minYear != 0) { - boundaries.add(new Timestamp(before)); + boundaries.add(Timestamp.ofEpochMilli(before)); } - boundaries.add(new Timestamp(exactly)); + boundaries.add(Timestamp.ofEpochMilli(exactly)); if (year != maxYear) { - boundaries.add(new Timestamp(after)); + boundaries.add(Timestamp.ofEpochMilli(after)); } } return boundaries.toArray(new Timestamp[0]); @@ -187,7 +184,7 @@ private VectorizedRowBatch getVectorizedRowBatch(Timestamp[] inputs, int size, T private byte[] encodeTime(Timestamp timestamp) { ByteBuffer encoded; - long time = timestamp.getTime(); + long time = timestamp.getMillis(); try { String formatted = dateFormat.format(new Date(time)); encoded = Text.encode(formatted); @@ -199,7 +196,7 @@ private VectorizedRowBatch getVectorizedRowBatch(Timestamp[] inputs, int size, T private Timestamp decodeTime(byte[] time) { try { - return new Timestamp(dateFormat.parse(Text.decode(time)).getTime()); + return Timestamp.ofEpochMilli(dateFormat.parse(Text.decode(time)).getTime()); } catch (Exception e) { throw new RuntimeException(e); } @@ -226,7 +223,7 @@ private void compareToUDFYearLong(Timestamp t, int y) { IntWritable res = udf.evaluate(tsw); if (res.get() != y) { System.out.printf("%d vs %d for %s, %d\n", res.get(), y, t.toString(), - tsw.getTimestamp().getTime()/1000); + tsw.getTimestamp().getSeconds()); } Assert.assertEquals(res.get(), y); } @@ -259,7 +256,7 @@ private void verifyUDFYear(VectorizedRowBatch batch, TestType testType) { } private void testVectorUDFYear(TestType testType) { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -275,14 +272,14 @@ private void testVectorUDFYear(TestType testType) { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFYear(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFYear(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFYear(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; @@ -351,7 +348,7 @@ private void verifyUDFDayOfMonth(VectorizedRowBatch batch, TestType testType) { } private void testVectorUDFDayOfMonth(TestType testType) { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -367,14 +364,14 @@ private void testVectorUDFDayOfMonth(TestType testType) { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFDayOfMonth(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); 
batch.cols[0].isRepeating = true; verifyUDFDayOfMonth(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFDayOfMonth(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; @@ -436,7 +433,7 @@ private void verifyUDFHour(VectorizedRowBatch batch, TestType testType) { } private void testVectorUDFHour(TestType testType) { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -452,14 +449,14 @@ private void testVectorUDFHour(TestType testType) { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFHour(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFHour(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFHour(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; @@ -521,7 +518,7 @@ private void verifyUDFMinute(VectorizedRowBatch batch, TestType testType) { } private void testVectorUDFMinute(TestType testType) { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -537,14 +534,14 @@ private void testVectorUDFMinute(TestType testType) { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFMinute(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFMinute(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFMinute(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; @@ -606,7 +603,7 @@ private void verifyUDFMonth(VectorizedRowBatch batch, TestType testType) { } private void testVectorUDFMonth(TestType testType) { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -622,14 +619,14 @@ private void testVectorUDFMonth(TestType testType) { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); 
verifyUDFMonth(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFMonth(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFMonth(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; @@ -691,7 +688,7 @@ private void verifyUDFSecond(VectorizedRowBatch batch, TestType testType) { } private void testVectorUDFSecond(TestType testType) { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -707,14 +704,14 @@ private void testVectorUDFSecond(TestType testType) { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFSecond(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFSecond(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFSecond(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; @@ -742,7 +739,7 @@ public void testVectorUDFSecondString() { } private void compareToUDFUnixTimeStampLong(Timestamp ts, long y) { - long seconds = ts.getTime() / 1000; + long seconds = ts.getSeconds(); if(seconds != y) { System.out.printf("%d vs %d for %s\n", seconds, y, ts.toString()); Assert.assertTrue(false); @@ -777,7 +774,7 @@ private void verifyUDFUnixTimeStamp(VectorizedRowBatch batch, TestType testType) } private void testVectorUDFUnixTimeStamp(TestType testType) { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -793,14 +790,14 @@ private void testVectorUDFUnixTimeStamp(TestType testType) { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFUnixTimeStamp(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFUnixTimeStamp(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFUnixTimeStamp(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; @@ -859,7 +856,7 @@ private void verifyUDFWeekOfYear(VectorizedRowBatch batch, 
TestType testType) { } private void testVectorUDFWeekOfYear(TestType testType) { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -875,14 +872,14 @@ private void testVectorUDFWeekOfYear(TestType testType) { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFWeekOfYear(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFWeekOfYear(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFWeekOfYear(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java index 887f090..9b743a5 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java @@ -22,33 +22,30 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import java.io.UnsupportedEncodingException; import java.math.BigDecimal; -import java.math.MathContext; -import java.math.RoundingMode; -import java.sql.Timestamp; -import java.util.Arrays; import java.util.Random; import java.util.concurrent.TimeUnit; -import junit.framework.Assert; - import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*; -import org.apache.hadoop.hive.ql.exec.vector.expressions.*; +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToBooleanViaDoubleToLong; +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong; +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToBooleanViaLongToLong; +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToDouble; import org.apache.hadoop.hive.ql.util.TimestampUtils; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; -import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils; import org.junit.Test; +import junit.framework.Assert; + /** * Test VectorExpression classes for vectorized implementations of type casts. 
*/ @@ -84,7 +81,7 @@ public void testCastDateToTimestamp() { expr.evaluate(b); for (int i = 0; i < intValues.length; i++) { Timestamp timestamp = resultV.asScratchTimestamp(i); - long actual = DateWritable.millisToDays(timestamp.getTime()); + long actual = DateWritable.millisToDays(timestamp.getMillis()); assertEquals(actual, intValues[i]); } } @@ -435,9 +432,8 @@ private VectorizedRowBatch getBatchDecimalTimestamp(double[] doubleValues) { Random r = new Random(94830); for (int i = 0; i < doubleValues.length; i++) { long millis = RandomTypeUtil.randomMillis(r); - Timestamp ts = new Timestamp(millis); int nanos = RandomTypeUtil.randomNanos(r); - ts.setNanos(nanos); + Timestamp ts = Timestamp.ofEpochMilli(millis, nanos); TimestampWritable tsw = new TimestampWritable(ts); double asDouble = tsw.getDouble(); doubleValues[i] = asDouble; @@ -500,8 +496,7 @@ private VectorizedRowBatch getBatchTimestampDecimal(HiveDecimal[] hiveDecimalVal break; } long millis = RandomTypeUtil.randomMillis(r); - Timestamp ts = new Timestamp(millis); - ts.setNanos(optionalNanos); + Timestamp ts = Timestamp.ofEpochMilli(millis, optionalNanos); TimestampWritable tsw = new TimestampWritable(ts); hiveDecimalValues[i] = tsw.getHiveDecimal(); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java index 137df12..6fa807a 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java @@ -20,20 +20,18 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; -import junit.framework.TestCase; - import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.fast.DeserializeRead; import org.apache.hadoop.hive.serde2.fast.SerializeWrite; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -63,7 +61,8 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; -import org.apache.hadoop.io.Writable; + +import junit.framework.TestCase; /** * (Copy of VerifyFast from serde). 
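The remaining test diffs below repeat the same mechanical substitution: java.sql.Timestamp is swapped for org.apache.hadoop.hive.common.type.Timestamp, millisecond-based construction and accessors move to the new factory and getter methods, and the deprecated int-based constructor is replaced by a LocalDateTime-based one. A minimal sketch of that mapping, using only the API calls that appear in these hunks (the class name and literal values below are illustrative, not part of the patch):

    import java.time.LocalDateTime;
    import org.apache.hadoop.hive.common.type.Timestamp;

    public class TimestampMigrationSketch {
      public static void main(String[] args) {
        // new Timestamp(millis) [+ setNanos(n)]  ->  factory with explicit millis and optional nanos
        Timestamp t1 = Timestamp.ofEpochMilli(1293843661000L);
        Timestamp t2 = Timestamp.ofEpochMilli(1293843661000L, 123456789);

        // deprecated java.sql.Timestamp(year, month, day, ...)  ->  LocalDateTime-based constructor
        Timestamp t3 = new Timestamp(LocalDateTime.of(2010, 12, 31, 23, 1, 1));

        // ts.getTime() -> getMillis();  ts.getTime() / 1000 -> getSeconds()
        System.out.println(t1.getMillis());
        System.out.println(t1.getSeconds());

        // string parsing keeps the same entry point
        Timestamp t4 = Timestamp.valueOf("1970-01-01 00:00:00");
        System.out.println(t4);
      }
    }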
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java
index 98849c3..1b25613 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java
@@ -18,15 +18,14 @@ package org.apache.hadoop.hive.ql.exec.vector.util;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.regex.MatchResult;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
index 1f50282..ede6016 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
@@ -18,11 +18,11 @@ package org.apache.hadoop.hive.ql.exec.vector.util;
-import java.sql.Timestamp;
 import java.util.Random;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
index 4d3b4a7..3eec9f1 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
@@ -18,21 +18,19 @@ package org.apache.hadoop.hive.ql.exec.vector.util.batchgen;
-import java.sql.Timestamp;
 import java.util.Arrays;
 import java.util.Random;
 import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType;
 import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType.GenerateCategory;
-import org.apache.hadoop.io.BooleanWritable;
-import org.apache.hadoop.io.Text;
 public class VectorColumnGroupGenerator {
diff --git ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
index dae2a5f..aca5da4 100644
---
ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java @@ -29,7 +29,6 @@ import java.math.BigInteger; import java.nio.ByteBuffer; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -44,6 +43,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf; import org.apache.hadoop.hive.ql.io.sarg.SearchArgument; import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory; diff --git ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java index ec6def5..1e40122 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java @@ -13,19 +13,18 @@ */ package org.apache.hadoop.hive.ql.io.parquet.serde; -import java.sql.Timestamp; import java.util.Calendar; -import java.util.Date; import java.util.GregorianCalendar; import java.util.TimeZone; import java.util.concurrent.TimeUnit; -import junit.framework.Assert; -import junit.framework.TestCase; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime; import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils; +import junit.framework.Assert; +import junit.framework.TestCase; + /** @@ -42,7 +41,7 @@ public void testJulianDay() { cal.set(Calendar.HOUR_OF_DAY, 0); cal.setTimeZone(TimeZone.getTimeZone("GMT")); - Timestamp ts = new Timestamp(cal.getTimeInMillis()); + Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis()); NanoTime nt = NanoTimeUtils.getNanoTime(ts, false); Assert.assertEquals(nt.getJulianDay(), 2440000); @@ -57,7 +56,7 @@ public void testJulianDay() { cal1.set(Calendar.HOUR_OF_DAY, 0); cal1.setTimeZone(TimeZone.getTimeZone("GMT")); - Timestamp ts1 = new Timestamp(cal1.getTimeInMillis()); + Timestamp ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis()); NanoTime nt1 = NanoTimeUtils.getNanoTime(ts1, false); Timestamp ts1Fetched = NanoTimeUtils.getTimestamp(nt1, false); @@ -70,7 +69,7 @@ public void testJulianDay() { cal2.set(Calendar.HOUR_OF_DAY, 0); cal2.setTimeZone(TimeZone.getTimeZone("UTC")); - Timestamp ts2 = new Timestamp(cal2.getTimeInMillis()); + Timestamp ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis()); NanoTime nt2 = NanoTimeUtils.getNanoTime(ts2, false); Timestamp ts2Fetched = NanoTimeUtils.getTimestamp(nt2, false); @@ -86,7 +85,7 @@ public void testJulianDay() { cal1.set(Calendar.HOUR_OF_DAY, 0); cal1.setTimeZone(TimeZone.getTimeZone("GMT")); - ts1 = new Timestamp(cal1.getTimeInMillis()); + ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis()); nt1 = NanoTimeUtils.getNanoTime(ts1, false); ts1Fetched = NanoTimeUtils.getTimestamp(nt1, false); @@ -99,7 +98,7 @@ public void testJulianDay() { cal2.set(Calendar.HOUR_OF_DAY, 0); cal2.setTimeZone(TimeZone.getTimeZone("UTC")); - ts2 = new Timestamp(cal2.getTimeInMillis()); + ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis()); nt2 = NanoTimeUtils.getNanoTime(ts2, false); ts2Fetched = NanoTimeUtils.getTimestamp(nt2, false); @@ -117,8 +116,7 @@ public void testNanos() { cal.set(Calendar.MINUTE, 1); cal.set(Calendar.SECOND, 1); 
cal.setTimeZone(TimeZone.getTimeZone("GMT")); - Timestamp ts = new Timestamp(cal.getTimeInMillis()); - ts.setNanos(1); + Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 1); //(1*60*60 + 1*60 + 1) * 10e9 + 1 NanoTime nt = NanoTimeUtils.getNanoTime(ts, false); @@ -133,8 +131,7 @@ public void testNanos() { cal.set(Calendar.MINUTE, 59); cal.set(Calendar.SECOND, 59); cal.setTimeZone(TimeZone.getTimeZone("GMT")); - ts = new Timestamp(cal.getTimeInMillis()); - ts.setNanos(999999999); + ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 999999999); //(23*60*60 + 59*60 + 59)*10e9 + 999999999 nt = NanoTimeUtils.getNanoTime(ts, false); @@ -149,8 +146,7 @@ public void testNanos() { cal2.set(Calendar.MINUTE, 10); cal2.set(Calendar.SECOND, 0); cal2.setTimeZone(TimeZone.getTimeZone("GMT")); - Timestamp ts2 = new Timestamp(cal2.getTimeInMillis()); - ts2.setNanos(10); + Timestamp ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis(), 10); Calendar cal1 = Calendar.getInstance(); cal1.set(Calendar.YEAR, 1968); @@ -160,8 +156,7 @@ public void testNanos() { cal1.set(Calendar.MINUTE, 0); cal1.set(Calendar.SECOND, 0); cal1.setTimeZone(TimeZone.getTimeZone("GMT")); - Timestamp ts1 = new Timestamp(cal1.getTimeInMillis()); - ts1.setNanos(1); + Timestamp ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis(), 1); NanoTime n2 = NanoTimeUtils.getNanoTime(ts2, false); NanoTime n1 = NanoTimeUtils.getNanoTime(ts1, false); @@ -183,8 +178,7 @@ public void testTimezone() { cal.set(Calendar.MINUTE, 1); cal.set(Calendar.SECOND, 1); cal.setTimeZone(TimeZone.getTimeZone("US/Pacific")); - Timestamp ts = new Timestamp(cal.getTimeInMillis()); - ts.setNanos(1); + Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 1); /** * 17:00 PDT = 00:00 GMT (daylight-savings) diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java index ccccd32..cdf8f04 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hive.ql.udf; -import java.sql.Timestamp; import java.time.Instant; import java.time.ZoneId; -import java.time.ZoneOffset; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; @@ -37,56 +36,56 @@ public void testTimestampToTimestampWithGranularity() throws Exception { // Running example // Friday 30th August 1985 02:47:02 AM - final TimestampWritable t = new TimestampWritable(new Timestamp(494243222000L)); + final TimestampWritable t = new TimestampWritable(Timestamp.ofEpochMilli(494243222000L)); UDFDateFloor g; // Year granularity // Tuesday 1st January 1985 12:00:00 AM g = new UDFDateFloorYear(); TimestampWritable i1 = g.evaluate(t); - assertEquals(473414400000L, i1.getTimestamp().getTime()); + assertEquals(473414400000L, i1.getTimestamp().getMillis()); // Quarter granularity // Monday 1st July 1985 12:00:00 AM g = new UDFDateFloorQuarter(); TimestampWritable i2 = g.evaluate(t); - assertEquals(489049200000L, i2.getTimestamp().getTime()); + assertEquals(489049200000L, i2.getTimestamp().getMillis()); // Month granularity // Thursday 1st August 1985 12:00:00 AM g = new UDFDateFloorMonth(); TimestampWritable i3 = g.evaluate(t); - assertEquals(491727600000L, i3.getTimestamp().getTime()); 
+ assertEquals(491727600000L, i3.getTimestamp().getMillis()); // Week granularity // Monday 26th August 1985 12:00:00 AM g = new UDFDateFloorWeek(); TimestampWritable i4 = g.evaluate(t); - assertEquals(493887600000L, i4.getTimestamp().getTime()); + assertEquals(493887600000L, i4.getTimestamp().getMillis()); // Day granularity // Friday 30th August 1985 12:00:00 AM g = new UDFDateFloorDay(); TimestampWritable i5 = g.evaluate(t); - assertEquals(494233200000L, i5.getTimestamp().getTime()); + assertEquals(494233200000L, i5.getTimestamp().getMillis()); // Hour granularity // Friday 30th August 1985 02:00:00 AM g = new UDFDateFloorHour(); TimestampWritable i6 = g.evaluate(t); - assertEquals(494240400000L, i6.getTimestamp().getTime()); + assertEquals(494240400000L, i6.getTimestamp().getMillis()); // Minute granularity // Friday 30th August 1985 02:47:00 AM g = new UDFDateFloorMinute(); TimestampWritable i7 = g.evaluate(t); - assertEquals(494243220000L, i7.getTimestamp().getTime()); + assertEquals(494243220000L, i7.getTimestamp().getMillis()); // Second granularity // Friday 30th August 1985 02:47:02 AM g = new UDFDateFloorSecond(); TimestampWritable i8 = g.evaluate(t); - assertEquals(494243222000L, i8.getTimestamp().getTime()); + assertEquals(494243222000L, i8.getTimestamp().getMillis()); } @Test diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java index 7706c07..9982fa4 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java @@ -19,21 +19,21 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; - -import junit.framework.TestCase; +import java.time.LocalDateTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDate; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; +import junit.framework.TestCase; + public class TestGenericUDFDate extends TestCase { public void testStringToDate() throws HiveException { GenericUDFDate udf = new GenericUDFDate(); @@ -59,8 +59,8 @@ public void testTimestampToDate() throws HiveException { ObjectInspector[] arguments = {valueOI}; udf.initialize(arguments); - DeferredObject valueObj = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06, - 30, 4, 17, 52, 0))); + DeferredObject valueObj = new DeferredJavaObject(new TimestampWritable( + new Timestamp(LocalDateTime.of(109, 06, 30, 4, 17, 52, 0)))); DeferredObject[] args = {valueObj}; DateWritable output = (DateWritable) udf.evaluate(args); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java index 6dc4c34..82c8e1f 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java +++ 
ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java @@ -19,20 +19,20 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; - -import junit.framework.TestCase; +import java.time.LocalDateTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; +import junit.framework.TestCase; + public class TestGenericUDFDateAdd extends TestCase { public void testStringToDate() throws HiveException { GenericUDFDateAdd udf = new GenericUDFDateAdd(); @@ -66,8 +66,8 @@ public void testTimestampToDate() throws HiveException { ObjectInspector[] arguments = {valueOI1, valueOI2}; udf.initialize(arguments); - DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06, - 20, 4, 17, 52, 0))); + DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable( + new Timestamp(LocalDateTime.of(109, 06, 20, 4, 17, 52, 0)))); DeferredObject valueObj2 = new DeferredJavaObject(new Integer("3")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritable output = (DateWritable) udf.evaluate(args); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java index af45af7..34b68f1 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; - -import junit.framework.TestCase; +import java.time.LocalDateTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateDiff; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -34,6 +32,8 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; +import junit.framework.TestCase; + public class TestGenericUDFDateDiff extends TestCase { public void testStringToDate() throws HiveException { GenericUDFDateDiff udf = new GenericUDFDateDiff(); @@ -67,10 +67,10 @@ public void testTimestampToDate() throws HiveException { ObjectInspector[] arguments = {valueOI1, valueOI2}; udf.initialize(arguments); - DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06, - 20, 0, 0, 0, 0))); - DeferredObject valueObj2 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06, - 17, 0, 0, 0, 0))); + DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable( + new Timestamp(LocalDateTime.of(109, 06, 20, 0, 0, 0, 0)))); + 
DeferredObject valueObj2 = new DeferredJavaObject(new TimestampWritable( + new Timestamp(LocalDateTime.of(109, 06, 17, 0, 0, 0, 0)))); DeferredObject[] args = {valueObj1, valueObj2}; IntWritable output = (IntWritable) udf.evaluate(args); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java index db53a49..cb0acb9 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java @@ -18,10 +18,10 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; import junit.framework.TestCase; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java index a8b4e7f..9e1233e 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java @@ -19,20 +19,20 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; - -import junit.framework.TestCase; +import java.time.LocalDateTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateSub; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; +import junit.framework.TestCase; + public class TestGenericUDFDateSub extends TestCase { public void testStringToDate() throws HiveException { GenericUDFDateSub udf = new GenericUDFDateSub(); @@ -66,8 +66,8 @@ public void testTimestampToDate() throws HiveException { ObjectInspector[] arguments = {valueOI1, valueOI2}; udf.initialize(arguments); - DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06, - 20, 4, 17, 52, 0))); + DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable( + new Timestamp(LocalDateTime.of(109, 06, 20, 4, 17, 52, 0)))); DeferredObject valueObj2 = new DeferredJavaObject(new Integer("3")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritable output = (DateWritable) udf.evaluate(args); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java index 65f706a..8e1949e 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java @@ -17,10 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Timestamp; - -import junit.framework.TestCase; - +import org.apache.hadoop.hive.common.type.Timestamp; import 
org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; @@ -29,6 +26,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; +import junit.framework.TestCase; + public class TestGenericUDFLastDay extends TestCase { public void testLastDay() throws HiveException { diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java index 224047d..640951c 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java index b060877..439c871 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java @@ -19,11 +19,11 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.session.SessionState; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java index b49f6ef..2a103a1 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java @@ -19,11 +19,11 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.session.SessionState; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java index 5d38fd6..2266824 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java @@ -18,10 +18,10 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; import junit.framework.TestCase; +import org.apache.hadoop.hive.common.type.Timestamp; import 
org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java index 52d30d3..f562617 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; @@ -66,12 +66,12 @@ public void testTimestamp() throws HiveException { Timestamp ts = Timestamp.valueOf("1970-01-01 00:00:00"); runAndVerify(udf, new TimestampWritable(ts), - new LongWritable(ts.getTime() / 1000)); + new LongWritable(ts.getSeconds())); ts = Timestamp.valueOf("2001-02-03 01:02:03"); runAndVerify(udf, new TimestampWritable(ts), - new LongWritable(ts.getTime() / 1000)); + new LongWritable(ts.getSeconds())); // test null values runAndVerify(udf, null, null); @@ -101,7 +101,7 @@ public void testString() throws HiveException { String val = "2001-01-01 01:02:03"; runAndVerify(udf1, new Text(val), - new LongWritable(Timestamp.valueOf(val).getTime() / 1000)); + new LongWritable(Timestamp.valueOf(val).getSeconds())); // test null values runAndVerify(udf1, null, null); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java index 66bd7b4..7f3a1ff 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; diff --git ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java index 35fe941..d8fbe83 100644 --- ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java +++ ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java @@ -18,10 +18,10 @@ package org.apache.hadoop.hive.ql.util; import java.sql.Date; -import java.sql.Timestamp; import java.util.TimeZone; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.util.DateTimeMath; import org.junit.*; diff --git ql/src/test/queries/clientpositive/timestamp_comparison2.q ql/src/test/queries/clientpositive/timestamp_comparison2.q index d41cc83..a93849c 100644 --- ql/src/test/queries/clientpositive/timestamp_comparison2.q +++ ql/src/test/queries/clientpositive/timestamp_comparison2.q @@ -16,8 +16,8 @@ FROM alltypesorc WHERE ((ctinyint != 0) AND - (((ctimestamp1 <= 
timestamp('1969-12-31 16:00:00')) + (((ctimestamp1 <= timestamp('1970-01-01 00:00:00')) OR ((ctinyint = cint) OR (cstring2 LIKE 'ss'))) AND ((988888 < cdouble) - OR ((ctimestamp2 > timestamp('1969-12-31 07:55:29')) AND (3569 >= cdouble))))) + OR ((ctimestamp2 > timestamp('1969-12-31 15:55:29')) AND (3569 >= cdouble))))) ; diff --git ql/src/test/queries/clientpositive/timestamp_dst.q ql/src/test/queries/clientpositive/timestamp_dst.q new file mode 100644 index 0000000..4dda5a9 --- /dev/null +++ ql/src/test/queries/clientpositive/timestamp_dst.q @@ -0,0 +1,2 @@ +select TIMESTAMP '2015-03-08 02:10:00.101'; + diff --git ql/src/test/results/clientpositive/timestamp_1.q.out ql/src/test/results/clientpositive/timestamp_1.q.out index d3ca5cf..1b88d3d 100644 --- ql/src/test/results/clientpositive/timestamp_1.q.out +++ ql/src/test/results/clientpositive/timestamp_1.q.out @@ -46,7 +46,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -55,7 +55,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -64,7 +64,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -73,7 +73,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -82,7 +82,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -91,7 +91,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.293872461E9 +1.293843661E9 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -129,7 +129,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -138,7 +138,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -147,7 +147,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1 POSTHOOK: type: 
QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -156,7 +156,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -165,7 +165,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -174,7 +174,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.293872461E9 +1.293843661E9 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -212,7 +212,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -221,7 +221,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -230,7 +230,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -239,7 +239,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -248,7 +248,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -257,7 +257,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.2938724611E9 +1.2938436611E9 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -295,7 +295,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -304,7 +304,7 @@ POSTHOOK: query: select cast(t as 
smallint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -313,7 +313,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -322,7 +322,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -331,7 +331,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -340,7 +340,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.2938724610001E9 +1.2938436610001E9 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -378,7 +378,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -387,7 +387,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -396,7 +396,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -405,7 +405,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -414,7 +414,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -423,7 +423,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.2938724610001E9 +1.2938436610001E9 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: 
default@timestamp_1 @@ -461,7 +461,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -470,7 +470,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -479,7 +479,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -488,7 +488,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -497,7 +497,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -506,7 +506,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.293872461001E9 +1.293843661001E9 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 diff --git ql/src/test/results/clientpositive/timestamp_2.q.out ql/src/test/results/clientpositive/timestamp_2.q.out index f9bfb09..15c8b76 100644 --- ql/src/test/results/clientpositive/timestamp_2.q.out +++ ql/src/test/results/clientpositive/timestamp_2.q.out @@ -46,7 +46,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -55,7 +55,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -64,7 +64,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -73,7 +73,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -82,7 +82,7 @@ POSTHOOK: 
query: select cast(t as float) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -91,7 +91,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.293872461E9 +1.293843661E9 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -129,7 +129,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -138,7 +138,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -147,7 +147,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -156,7 +156,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -165,7 +165,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -174,7 +174,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.293872461E9 +1.293843661E9 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -212,7 +212,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -221,7 +221,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -230,7 +230,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ 
-239,7 +239,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -248,7 +248,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -257,7 +257,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.2938724611E9 +1.2938436611E9 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -295,7 +295,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -304,7 +304,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -313,7 +313,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -322,7 +322,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -331,7 +331,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -340,7 +340,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.2938724610001E9 +1.2938436610001E9 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -378,7 +378,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -387,7 +387,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1 PREHOOK: type: 
QUERY PREHOOK: Input: default@timestamp_2 @@ -396,7 +396,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -405,7 +405,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -414,7 +414,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -423,7 +423,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.2938724610001E9 +1.2938436610001E9 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -461,7 +461,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -470,7 +470,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -479,7 +479,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -488,7 +488,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -497,7 +497,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -506,7 +506,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.293872461001E9 +1.293843661001E9 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 diff --git ql/src/test/results/clientpositive/timestamp_3.q.out ql/src/test/results/clientpositive/timestamp_3.q.out index 0664abf..9218501 100644 --- ql/src/test/results/clientpositive/timestamp_3.q.out +++ 
ql/src/test/results/clientpositive/timestamp_3.q.out
@@ -100,7 +100,7 @@ POSTHOOK: query: select cast(t as string) from timestamp_3 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_3
 #### A masked pattern was here ####
-2011-04-29 20:46:56.4485
+2011-04-30 03:46:56.4485
 PREHOOK: query: select t, sum(t), count(*), sum(t)/count(*), avg(t) from timestamp_3 group by t
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_3
@@ -109,7 +109,7 @@ POSTHOOK: query: select t, sum(t), count(*), sum(t)/count(*), avg(t) from timest
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_3
 #### A masked pattern was here ####
-2011-04-29 20:46:56.4485 1.3041352164485E9 1 1.3041352164485E9 1.3041352164485E9
+2011-04-30 03:46:56.4485 1.3041352164485E9 1 1.3041352164485E9 1.3041352164485E9
 PREHOOK: query: drop table timestamp_3
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@timestamp_3
diff --git ql/src/test/results/clientpositive/timestamp_comparison2.q.out ql/src/test/results/clientpositive/timestamp_comparison2.q.out
index 8ef2552..7c83874 100644
--- ql/src/test/results/clientpositive/timestamp_comparison2.q.out
+++ ql/src/test/results/clientpositive/timestamp_comparison2.q.out
@@ -28,10 +28,10 @@ FROM alltypesorc
 WHERE
 ((ctinyint != 0)
  AND
-  (((ctimestamp1 <= timestamp('1969-12-31 16:00:00'))
+  (((ctimestamp1 <= timestamp('1970-01-01 00:00:00'))
   OR ((ctinyint = cint) OR (cstring2 LIKE 'ss')))
  AND ((988888 < cdouble)
-  OR ((ctimestamp2 > timestamp('1969-12-31 07:55:29')) AND (3569 >= cdouble)))))
+  OR ((ctimestamp2 > timestamp('1969-12-31 15:55:29')) AND (3569 >= cdouble)))))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
@@ -40,10 +40,10 @@ FROM alltypesorc
 WHERE
 ((ctinyint != 0)
  AND
-  (((ctimestamp1 <= timestamp('1969-12-31 16:00:00'))
+  (((ctimestamp1 <= timestamp('1970-01-01 00:00:00'))
   OR ((ctinyint = cint) OR (cstring2 LIKE 'ss')))
  AND ((988888 < cdouble)
-  OR ((ctimestamp2 > timestamp('1969-12-31 07:55:29')) AND (3569 >= cdouble)))))
+  OR ((ctimestamp2 > timestamp('1969-12-31 15:55:29')) AND (3569 >= cdouble)))))
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/timestamp_dst.q.out ql/src/test/results/clientpositive/timestamp_dst.q.out
new file mode 100644
index 0000000..72a5ebf
--- /dev/null
+++ ql/src/test/results/clientpositive/timestamp_dst.q.out
@@ -0,0 +1,9 @@
+PREHOOK: query: select TIMESTAMP '2015-03-08 02:10:00.101'
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select TIMESTAMP '2015-03-08 02:10:00.101'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2015-03-08 02:10:00.101
diff --git ql/src/test/results/clientpositive/timestamp_ints_casts.q.out ql/src/test/results/clientpositive/timestamp_ints_casts.q.out
index bc5ceb3..f4be734 100644
--- ql/src/test/results/clientpositive/timestamp_ints_casts.q.out
+++
16:00:00 1969-12-31 15:59:53.817 NULL NULL -1969-12-31 15:59:59.97 1969-12-31 15:59:59.8 NULL 1970-01-17 05:10:52.25 1969-12-31 15:59:30 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 16:00:12.935 NULL NULL -1969-12-31 15:59:59.949 NULL 1970-01-09 14:53:20.971 1970-01-12 20:45:23.25 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL -1969-12-31 15:59:59.949 NULL 1970-01-09 07:39:13.882 1969-12-09 07:45:32.75 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL -1969-12-31 16:00:00.02 1969-12-31 16:00:15.601 NULL 1969-12-27 11:19:26.75 1969-12-31 16:00:20 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.129 NULL NULL -1969-12-31 15:59:59.962 1969-12-31 16:00:15.601 NULL 1969-12-10 03:41:51 1969-12-31 15:59:22 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:58.614 NULL NULL -1969-12-31 15:59:59.995 1969-12-31 16:00:15.601 NULL 1970-01-07 18:06:56 1969-12-31 15:59:55 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 16:00:04.679 NULL NULL -1969-12-31 16:00:00.048 1969-12-31 16:00:15.601 NULL 1969-12-22 11:03:59 1969-12-31 16:00:48 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:50.235 NULL NULL -1969-12-31 16:00:00.008 NULL 1969-12-24 00:12:58.862 1969-12-20 21:16:47.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:00.008 NULL 1969-12-30 11:24:23.566 1969-12-16 11:20:17.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:00.008 NULL 1970-01-09 23:39:39.664 1970-01-10 17:09:21.5 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:00.008 NULL 1969-12-23 21:59:27.689 1970-01-19 01:16:31.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:00.008 NULL 1970-01-10 23:29:48.972 1969-12-10 02:41:39 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:00.008 NULL 1970-01-11 10:34:27.246 1970-01-14 14:49:59.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 15:59:59.941 1969-12-31 15:59:52.804 NULL 1969-12-13 02:11:50 1969-12-31 15:59:01 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:13.15 NULL NULL -1969-12-31 15:59:59.979 1969-12-31 15:59:52.804 NULL 1970-01-18 12:27:09 1969-12-31 15:59:39 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:55.9 NULL NULL -1969-12-31 15:59:59.94 1969-12-31 15:59:52.804 NULL 1970-01-18 05:11:54.75 1969-12-31 15:59:00 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:52.408 NULL NULL -1969-12-31 15:59:59.986 1969-12-31 15:59:52.804 NULL 1969-12-13 16:50:00.5 1969-12-31 15:59:46 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:11.065 NULL NULL -1969-12-31 16:00:00.059 1969-12-31 15:59:52.804 NULL 1969-12-18 11:57:25.5 1969-12-31 16:00:59 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.956 NULL NULL -1969-12-31 15:59:59.992 1969-12-31 15:59:52.804 NULL 1969-12-10 06:06:48.5 1969-12-31 15:59:52 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:03.136 NULL NULL -1969-12-31 16:00:00.005 1969-12-31 15:59:52.804 NULL 1969-12-19 21:53:12.5 1969-12-31 16:00:05 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.973 NULL NULL 
-1969-12-31 15:59:59.976 1969-12-31 15:59:52.804 NULL 1970-01-10 06:18:31 1969-12-31 15:59:36 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.145 NULL NULL -1969-12-31 15:59:59.95 1969-12-31 15:59:52.804 NULL 1969-12-19 17:33:32.75 1969-12-31 15:59:10 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:54.733 NULL NULL -1969-12-31 16:00:00.011 NULL 1969-12-30 22:03:04.018 1970-01-21 12:50:53.75 1969-12-31 16:00:11 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL -1969-12-31 16:00:00.011 NULL 1969-12-27 18:49:09.583 1970-01-14 22:35:27 1969-12-31 16:00:11 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL +1969-12-31 23:59:59.964 1969-12-31 23:59:59.8 NULL 1969-12-08 18:43:03.25 1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 23:59:45.748 NULL NULL +1969-12-31 23:59:59.964 1969-12-31 23:59:59.8 NULL 1970-01-19 12:24:39 1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 23:59:53.817 NULL NULL +1969-12-31 23:59:59.97 1969-12-31 23:59:59.8 NULL 1970-01-17 13:10:52.25 1969-12-31 23:59:30 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1970-01-01 00:00:12.935 NULL NULL +1969-12-31 23:59:59.949 NULL 1970-01-09 22:53:20.971 1970-01-13 04:45:23.25 1969-12-31 23:59:09 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:08.451 NULL NULL +1969-12-31 23:59:59.949 NULL 1970-01-09 15:39:13.882 1969-12-09 15:45:32.75 1969-12-31 23:59:09 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1970-01-01 00:00:08.451 NULL NULL +1970-01-01 00:00:00.02 1970-01-01 00:00:15.601 NULL 1969-12-27 19:19:26.75 1970-01-01 00:00:20 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 23:59:45.129 NULL NULL +1969-12-31 23:59:59.962 1970-01-01 00:00:15.601 NULL 1969-12-10 11:41:51 1969-12-31 23:59:22 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 23:59:58.614 NULL NULL +1969-12-31 23:59:59.995 1970-01-01 00:00:15.601 NULL 1970-01-08 02:06:56 1969-12-31 23:59:55 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1970-01-01 00:00:04.679 NULL NULL +1970-01-01 00:00:00.048 1970-01-01 00:00:15.601 NULL 1969-12-22 19:03:59 1970-01-01 00:00:48 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 23:59:50.235 NULL NULL +1970-01-01 00:00:00.008 NULL 1969-12-24 08:12:58.862 1969-12-21 05:16:47.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:00.008 NULL 1969-12-30 19:24:23.566 1969-12-16 19:20:17.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:00.008 NULL 1970-01-10 07:39:39.664 1970-01-11 01:09:21.5 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:00.008 NULL 1969-12-24 05:59:27.689 1970-01-19 09:16:31.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:00.008 NULL 1970-01-11 07:29:48.972 1969-12-10 10:41:39 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:00.008 NULL 1970-01-11 18:34:27.246 1970-01-14 22:49:59.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1969-12-31 23:59:59.941 1969-12-31 23:59:52.804 NULL 1969-12-13 10:11:50 1969-12-31 23:59:01 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:13.15 NULL NULL +1969-12-31 23:59:59.979 1969-12-31 23:59:52.804 
NULL 1970-01-18 20:27:09 1969-12-31 23:59:39 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:55.9 NULL NULL +1969-12-31 23:59:59.94 1969-12-31 23:59:52.804 NULL 1970-01-18 13:11:54.75 1969-12-31 23:59:00 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:52.408 NULL NULL +1969-12-31 23:59:59.986 1969-12-31 23:59:52.804 NULL 1969-12-14 00:50:00.5 1969-12-31 23:59:46 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:11.065 NULL NULL +1970-01-01 00:00:00.059 1969-12-31 23:59:52.804 NULL 1969-12-18 19:57:25.5 1970-01-01 00:00:59 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:10.956 NULL NULL +1969-12-31 23:59:59.992 1969-12-31 23:59:52.804 NULL 1969-12-10 14:06:48.5 1969-12-31 23:59:52 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:03.136 NULL NULL +1970-01-01 00:00:00.005 1969-12-31 23:59:52.804 NULL 1969-12-20 05:53:12.5 1970-01-01 00:00:05 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:10.973 NULL NULL +1969-12-31 23:59:59.976 1969-12-31 23:59:52.804 NULL 1970-01-10 14:18:31 1969-12-31 23:59:36 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:53.145 NULL NULL +1969-12-31 23:59:59.95 1969-12-31 23:59:52.804 NULL 1969-12-20 01:33:32.75 1969-12-31 23:59:10 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:54.733 NULL NULL +1970-01-01 00:00:00.011 NULL 1969-12-31 06:03:04.018 1970-01-21 20:50:53.75 1970-01-01 00:00:11 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1970-01-01 00:00:02.351 NULL NULL +1970-01-01 00:00:00.011 NULL 1969-12-28 02:49:09.583 1970-01-15 06:35:27 1970-01-01 00:00:11 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1970-01-01 00:00:02.351 NULL NULL PREHOOK: query: explain select @@ -236,29 +236,29 @@ where cbigint % 250 = 0 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1906-06-05 13:34:10 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.748 NULL NULL -1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 2020-09-11 19:50:00 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.817 NULL NULL -1969-12-31 15:59:30 1969-12-31 15:56:40 NULL 2015-04-23 22:10:50 1969-12-31 15:59:30 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 16:00:12.935 NULL NULL -1969-12-31 15:59:09 NULL 1994-07-07 10:09:31 2003-05-25 21:27:30 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL -1969-12-31 15:59:09 NULL 1993-09-08 22:51:22 1908-10-29 07:05:50 1969-12-31 15:59:09 NULL 1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL -1969-12-31 16:00:20 1969-12-31 20:20:01 NULL 1958-07-07 21:05:50 1969-12-31 16:00:20 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.129 NULL NULL -1969-12-31 15:59:22 1969-12-31 20:20:01 NULL 1911-02-07 01:30:00 1969-12-31 15:59:22 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:58.614 NULL NULL -1969-12-31 15:59:55 1969-12-31 20:20:01 NULL 1989-05-28 20:33:20 1969-12-31 15:59:55 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 16:00:04.679 NULL NULL -1969-12-31 16:00:48 1969-12-31 20:20:01 NULL 1944-10-18 03:23:20 1969-12-31 16:00:48 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:50.235 NULL NULL -1969-12-31 16:00:08 NULL 1949-01-13 00:21:02 1940-06-26 15:47:30 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:08 NULL 
1966-09-27 07:32:46 1928-05-26 10:07:30 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:08 NULL 1995-07-07 22:01:04 1997-07-05 20:58:20 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:08 NULL 1948-10-12 08:01:29 2020-05-04 04:20:50 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:08 NULL 1998-03-27 00:56:12 1910-12-27 06:10:00 1969-12-31 16:00:08 NULL 1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:08 NULL 1999-07-01 15:14:06 2008-03-13 02:07:30 1969-12-31 16:00:08 NULL 1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 15:59:01 1969-12-31 14:00:04 NULL 1919-02-22 13:13:20 1969-12-31 15:59:01 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:13.15 NULL NULL -1969-12-31 15:59:39 1969-12-31 14:00:04 NULL 2018-11-16 20:30:00 1969-12-31 15:59:39 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:55.9 NULL NULL -1969-12-31 15:59:00 1969-12-31 14:00:04 NULL 2018-01-18 14:32:30 1969-12-31 15:59:00 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:52.408 NULL NULL -1969-12-31 15:59:46 1969-12-31 14:00:04 NULL 1920-10-24 09:28:20 1969-12-31 15:59:46 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:11.065 NULL NULL -1969-12-31 16:00:59 1969-12-31 14:00:04 NULL 1933-12-12 05:05:00 1969-12-31 16:00:59 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.956 NULL NULL -1969-12-31 15:59:52 1969-12-31 14:00:04 NULL 1911-05-18 17:28:20 1969-12-31 15:59:52 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:03.136 NULL NULL -1969-12-31 16:00:05 1969-12-31 14:00:04 NULL 1937-10-25 22:48:20 1969-12-31 16:00:05 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.973 NULL NULL -1969-12-31 15:59:36 1969-12-31 14:00:04 NULL 1996-04-09 21:36:40 1969-12-31 15:59:36 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.145 NULL NULL -1969-12-31 15:59:10 1969-12-31 14:00:04 NULL 1937-04-28 15:05:50 1969-12-31 15:59:10 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:54.733 NULL NULL -1969-12-31 16:00:11 NULL 1967-12-14 19:06:58 2027-02-19 08:15:50 1969-12-31 16:00:11 NULL 1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL -1969-12-31 16:00:11 NULL 1959-05-16 04:19:43 2009-01-30 06:50:00 1969-12-31 16:00:11 NULL 1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL +1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1906-06-05 21:34:10 1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 23:59:45.748 NULL NULL +1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 2020-09-12 02:50:00 1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 23:59:53.817 NULL NULL +1969-12-31 23:59:30 1969-12-31 23:56:40 NULL 2015-04-24 05:10:50 1969-12-31 23:59:30 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1970-01-01 00:00:12.935 NULL NULL +1969-12-31 23:59:09 NULL 1994-07-07 17:09:31 2003-05-26 04:27:30 1969-12-31 23:59:09 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:08.451 NULL NULL +1969-12-31 23:59:09 NULL 1993-09-09 05:51:22 1908-10-29 15:05:50 1969-12-31 23:59:09 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1970-01-01 00:00:08.451 NULL NULL +1970-01-01 00:00:20 1970-01-01 04:20:01 NULL 1958-07-08 04:05:50 1970-01-01 00:00:20 1970-01-01 04:20:01 NULL 
1970-01-01 00:00:00 1969-12-31 23:59:45.129 NULL NULL +1969-12-31 23:59:22 1970-01-01 04:20:01 NULL 1911-02-07 09:30:00 1969-12-31 23:59:22 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 23:59:58.614 NULL NULL +1969-12-31 23:59:55 1970-01-01 04:20:01 NULL 1989-05-29 03:33:20 1969-12-31 23:59:55 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1970-01-01 00:00:04.679 NULL NULL +1970-01-01 00:00:48 1970-01-01 04:20:01 NULL 1944-10-18 10:23:20 1970-01-01 00:00:48 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 23:59:50.235 NULL NULL +1970-01-01 00:00:08 NULL 1949-01-13 08:21:02 1940-06-26 23:47:30 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:08 NULL 1966-09-27 14:32:46 1928-05-26 18:07:30 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:08 NULL 1995-07-08 05:01:04 1997-07-06 03:58:20 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:08 NULL 1948-10-12 15:01:29 2020-05-04 11:20:50 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:08 NULL 1998-03-27 08:56:12 1910-12-27 14:10:00 1970-01-01 00:00:08 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:08 NULL 1999-07-01 22:14:06 2008-03-13 09:07:30 1970-01-01 00:00:08 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1969-12-31 23:59:01 1969-12-31 22:00:04 NULL 1919-02-22 21:13:20 1969-12-31 23:59:01 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:13.15 NULL NULL +1969-12-31 23:59:39 1969-12-31 22:00:04 NULL 2018-11-17 04:30:00 1969-12-31 23:59:39 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:55.9 NULL NULL +1969-12-31 23:59:00 1969-12-31 22:00:04 NULL 2018-01-18 22:32:30 1969-12-31 23:59:00 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:52.408 NULL NULL +1969-12-31 23:59:46 1969-12-31 22:00:04 NULL 1920-10-24 17:28:20 1969-12-31 23:59:46 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:11.065 NULL NULL +1970-01-01 00:00:59 1969-12-31 22:00:04 NULL 1933-12-12 13:05:00 1970-01-01 00:00:59 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:10.956 NULL NULL +1969-12-31 23:59:52 1969-12-31 22:00:04 NULL 1911-05-19 01:28:20 1969-12-31 23:59:52 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:03.136 NULL NULL +1970-01-01 00:00:05 1969-12-31 22:00:04 NULL 1937-10-26 06:48:20 1970-01-01 00:00:05 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:10.973 NULL NULL +1969-12-31 23:59:36 1969-12-31 22:00:04 NULL 1996-04-10 04:36:40 1969-12-31 23:59:36 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:53.145 NULL NULL +1969-12-31 23:59:10 1969-12-31 22:00:04 NULL 1937-04-28 23:05:50 1969-12-31 23:59:10 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:54.733 NULL NULL +1970-01-01 00:00:11 NULL 1967-12-15 03:06:58 2027-02-19 16:15:50 1970-01-01 00:00:11 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1970-01-01 00:00:02.351 NULL NULL +1970-01-01 00:00:11 NULL 1959-05-16 11:19:43 2009-01-30 14:50:00 1970-01-01 00:00:11 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1970-01-01 00:00:02.351 NULL NULL diff --git ql/src/test/results/clientpositive/timestamp_udf.q.out ql/src/test/results/clientpositive/timestamp_udf.q.out index 47f84cb..9f70522 100644 --- 
ql/src/test/results/clientpositive/timestamp_udf.q.out +++ ql/src/test/results/clientpositive/timestamp_udf.q.out @@ -54,7 +54,7 @@ POSTHOOK: query: select unix_timestamp(t), year(t), month(t), day(t), dayofmonth POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_udf #### A masked pattern was here #### -1304690889 2011 5 6 6 18 7 8 9 2011-05-06 +1304665689 2011 5 6 6 18 7 8 9 2011-05-06 PREHOOK: query: select date_add(t, 5), date_sub(t, 10) from timestamp_udf PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/timestamptz_3.q.out ql/src/test/results/clientpositive/timestamptz_3.q.out index 196c584..68affaf 100644 --- ql/src/test/results/clientpositive/timestamptz_3.q.out +++ ql/src/test/results/clientpositive/timestamptz_3.q.out @@ -36,7 +36,7 @@ POSTHOOK: query: select cast(to_epoch_milli(t) as timestamp) from tstz1 POSTHOOK: type: QUERY POSTHOOK: Input: default@tstz1 #### A masked pattern was here #### -2016-01-03 12:26:34 +2016-01-03 20:26:34 PREHOOK: query: select cast(t as timestamp) from tstz1 PREHOOK: type: QUERY PREHOOK: Input: default@tstz1 @@ -54,4 +54,4 @@ POSTHOOK: query: select cast(to_epoch_milli(t) as timestamp) from tstz1 POSTHOOK: type: QUERY POSTHOOK: Input: default@tstz1 #### A masked pattern was here #### -2016-01-03 12:26:34 +2016-01-03 20:26:34 diff --git serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java index 83e5d68..48366fd 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java +++ serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.serde2.avro; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -28,15 +27,13 @@ import org.apache.avro.Schema; import org.apache.avro.Schema.Field; import org.apache.avro.Schema.Type; -import org.apache.avro.generic.GenericArray; import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericData.Fixed; import org.apache.avro.generic.GenericEnumSymbol; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; @@ -214,7 +211,7 @@ private Object serializePrimitive(TypeInfo typeInfo, PrimitiveObjectInspector fi case TIMESTAMP: Timestamp timestamp = ((TimestampObjectInspector) fieldOI).getPrimitiveJavaObject(structFieldData); - return timestamp.getTime(); + return timestamp.getMillis(); case UNKNOWN: throw new AvroSerdeException("Received UNKNOWN primitive category."); case VOID: diff --git serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java index 5be7714..8d759af 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java +++ serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.sql.Date; -import java.sql.Timestamp; import 
java.util.Arrays; import java.util.List; import java.util.Map; @@ -30,6 +29,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.ByteStream.Output; import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe; import org.apache.hadoop.hive.serde2.fast.SerializeWrite; diff --git serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java index 89bcf4f..ea601a5 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java +++ serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.sql.Date; -import java.sql.Timestamp; import java.util.List; import java.util.Map; @@ -30,6 +29,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.ByteStream.Output; /* diff --git serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java index 6866d49..4e29014 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java +++ serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java @@ -20,13 +20,10 @@ import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; -import java.sql.Timestamp; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.util.Date; +import java.time.format.DateTimeFormatter; import org.apache.hadoop.hive.common.type.HiveDecimal; -import org.apache.hadoop.hive.ql.util.TimestampUtils; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt; @@ -35,7 +32,6 @@ /** * TimestampWritable - * Writable equivalent of java.sq.Timestamp * * Timestamps are of the format * YYYY-MM-DD HH:MM:SS.[fff...] @@ -66,15 +62,15 @@ public static final int BINARY_SORTABLE_LENGTH = 11; - private static final ThreadLocal<DateFormat> threadLocalDateFormat = - new ThreadLocal<DateFormat>() { + private static final ThreadLocal<DateTimeFormatter> threadLocalDateFormat = + new ThreadLocal<DateTimeFormatter>() { @Override - protected DateFormat initialValue() { - return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + protected DateTimeFormatter initialValue() { + return DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); } }; - private Timestamp timestamp = new Timestamp(0); + private Timestamp timestamp = new Timestamp(); /** * true if data is stored in timestamp field rather than byte arrays. 
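The TimestampWritable changes above move from the millisecond-based java.sql.Timestamp API to the new zone-free org.apache.hadoop.hive.common.type.Timestamp, whose epoch accessors are defined against UTC; that is also why the expected .q.out values in this patch shift by the test runner's former local offset (8 hours, apparently US/Pacific, in the casts above). The short Java sketch below is not part of the patch; it only illustrates the assumed semantics of the factory and accessor methods that appear elsewhere in this diff (ofEpochSecond, getSeconds, getNanos, withNanos).

import org.apache.hadoop.hive.common.type.Timestamp;

public class TimestampUtcSketch {
  public static void main(String[] args) {
    // 1293843661 is 2011-01-01 01:01:01 UTC; the old java.sql.Timestamp-based code would have
    // rendered this instant in the JVM's local zone (2010-12-31 17:01:01 for US/Pacific).
    Timestamp ts = Timestamp.ofEpochSecond(1293843661L);
    System.out.println(ts);              // expected: 2011-01-01 01:01:01
    System.out.println(ts.getSeconds()); // expected: 1293843661, independent of the JVM zone

    // withNanos appears to return a new value rather than mutating in place,
    // which is why the updated tests reassign ts = ts.withNanos(...).
    Timestamp ts2 = ts.withNanos(123456789);
    System.out.println(ts2.getNanos());  // expected: 123456789
  }
}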
@@ -120,20 +116,12 @@ public void set(byte[] bytes, int offset) { clearTimestamp(); } - public void setTime(long time) { - timestamp.setTime(time); - bytesEmpty = true; - timestampEmpty = false; - } - public void set(Timestamp t) { if (t == null) { - timestamp.setTime(0); - timestamp.setNanos(0); + timestamp.setLocalDateTime(null); return; } - timestamp.setTime(t.getTime()); - timestamp.setNanos(t.getNanos()); + timestamp.setLocalDateTime(t.getLocalDateTime()); bytesEmpty = true; timestampEmpty = false; } @@ -151,12 +139,10 @@ public void set(TimestampWritable t) { } public static void updateTimestamp(Timestamp timestamp, long secondsAsMillis, int nanos) { - ((Date) timestamp).setTime(secondsAsMillis); - timestamp.setNanos(nanos); + timestamp.setTimeInMillis(secondsAsMillis, nanos); } public void setInternal(long secondsAsMillis, int nanos) { - // This is our way of documenting that we are MUTATING the contents of // this writable's internal timestamp. updateTimestamp(timestamp, secondsAsMillis, nanos); @@ -180,7 +166,7 @@ public void writeToByteStream(RandomAccessOutput byteStream) { */ public long getSeconds() { if (!timestampEmpty) { - return TimestampUtils.millisToSeconds(timestamp.getTime()); + return timestamp.getSeconds(); } else if (!bytesEmpty) { return TimestampWritable.getSeconds(currentBytes, offset); } else { @@ -312,7 +298,7 @@ private void checkBytes() { public double getDouble() { double seconds, nanos; if (bytesEmpty) { - seconds = TimestampUtils.millisToSeconds(timestamp.getTime()); + seconds = timestamp.getSeconds(); nanos = timestamp.getNanos(); } else { seconds = getSeconds(); @@ -322,7 +308,7 @@ public double getDouble() { } public static long getLong(Timestamp timestamp) { - return timestamp.getTime() / 1000; + return timestamp.getSeconds(); } public void readFields(DataInput in) throws IOException { @@ -393,13 +379,13 @@ public String toString() { if (timestampString.length() > 19) { if (timestampString.length() == 21) { if (timestampString.substring(19).compareTo(".0") == 0) { - return threadLocalDateFormat.get().format(timestamp); + return threadLocalDateFormat.get().format(timestamp.getLocalDateTime()); } } - return threadLocalDateFormat.get().format(timestamp) + timestampString.substring(19); + return threadLocalDateFormat.get().format(timestamp.getLocalDateTime()) + timestampString.substring(19); } - return threadLocalDateFormat.get().format(timestamp); + return threadLocalDateFormat.get().format(timestamp.getLocalDateTime()); } @Override @@ -413,8 +399,7 @@ public int hashCode() { private void populateTimestamp() { long seconds = getSeconds(); int nanos = getNanos(); - timestamp.setTime(seconds * 1000); - timestamp.setNanos(nanos); + timestamp.setTimeInSeconds(seconds, nanos); } /** Static methods **/ @@ -474,10 +459,9 @@ public static int getNanos(byte[] bytes, int offset) { */ public static void convertTimestampToBytes(Timestamp t, byte[] b, int offset) { - long millis = t.getTime(); + long seconds = t.getSeconds(); int nanos = t.getNanos(); - long seconds = TimestampUtils.millisToSeconds(millis); boolean hasSecondVInt = seconds < 0 || seconds > Integer.MAX_VALUE; boolean hasDecimal = setNanosBytes(nanos, b, offset+4, hasSecondVInt); @@ -535,34 +519,38 @@ public HiveDecimal getHiveDecimal() { public static HiveDecimal getHiveDecimal(Timestamp timestamp) { // The BigDecimal class recommends not converting directly from double to BigDecimal, // so we convert through a string... 
- Double timestampDouble = TimestampUtils.getDouble(timestamp); + long seconds = timestamp.getSeconds(); + Double timestampDouble = seconds + ((double) timestamp.getNanos()) / 1000000000; HiveDecimal result = HiveDecimal.create(timestampDouble.toString()); return result; } - /** * Converts the time in seconds or milliseconds to a timestamp. * @param time time in seconds or in milliseconds * @return the timestamp */ public static Timestamp longToTimestamp(long time, boolean intToTimestampInSeconds) { - // If the time is in seconds, converts it to milliseconds first. - return new Timestamp(intToTimestampInSeconds ? time * 1000 : time); + // If the time is in seconds, converts it to milliseconds first. + if (intToTimestampInSeconds) { + return Timestamp.ofEpochSecond(time); + } + return Timestamp.ofEpochMilli(time); } public static void setTimestamp(Timestamp t, byte[] bytes, int offset) { long seconds = getSeconds(bytes, offset); - t.setTime(seconds * 1000); + int nanos; if (hasDecimalOrSecondVInt(bytes[offset])) { - t.setNanos(getNanos(bytes, offset + 4)); + nanos = getNanos(bytes, offset + 4); } else { - t.setNanos(0); + nanos = 0; } + t.setTimeInSeconds(seconds, nanos); } public static Timestamp createTimestamp(byte[] bytes, int offset) { - Timestamp t = new Timestamp(0); + Timestamp t = new Timestamp(); TimestampWritable.setTimestamp(t, bytes, offset); return t; } diff --git serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java index 56945d1..6f73d13 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java +++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java @@ -20,10 +20,10 @@ import java.io.IOException; import java.io.OutputStream; import java.io.UnsupportedEncodingException; -import java.sql.Timestamp; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector; diff --git serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java index 324f5b8..0d81a62 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java +++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.serde2.lazy; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -29,6 +28,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; diff --git serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java index 3790d3c..ea5940f 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java +++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java @@ -21,7 +21,6 @@ import java.io.IOException; import java.nio.ByteBuffer; import java.sql.Date; -import 
java.sql.Timestamp; import java.util.ArrayDeque; import java.util.Deque; import java.util.List; @@ -35,6 +34,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.ByteStream.Output; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; diff --git serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java index 3829b08..7e0cc4d 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java +++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive; -import java.sql.Timestamp; import java.util.List; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.LazyTimestamp; import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector; diff --git serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java index e50ff5e..f300115 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java +++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayDeque; import java.util.Deque; import java.util.List; @@ -35,6 +34,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.ByteStream.Output; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; diff --git serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java index 5df2b6d4..80778a4 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector; diff --git serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java index 509189e..5930101 100644 --- 
serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; @@ -45,9 +44,7 @@ public Object copyObject(Object o) { return null; } Timestamp source = (Timestamp) o; - Timestamp copy = new Timestamp(source.getTime()); - copy.setNanos(source.getNanos()); - return copy; + return new Timestamp(source.getLocalDateTime()); } public Timestamp get(Object o) { @@ -58,7 +55,7 @@ public Object set(Object o, Timestamp value) { if (value == null) { return null; } - ((Timestamp) o).setTime(value.getTime()); + ((Timestamp) o).setLocalDateTime(value.getLocalDateTime()); return o; } @@ -72,13 +69,12 @@ public Object set(Object o, TimestampWritable tw) { return null; } Timestamp t = (Timestamp) o; - t.setTime(tw.getTimestamp().getTime()); - t.setNanos(tw.getTimestamp().getNanos()); + t.setLocalDateTime(tw.getTimestamp().getLocalDateTime()); return t; } public Object create(Timestamp value) { - return new Timestamp(value.getTime()); + return new Timestamp(value.getLocalDateTime()); } public Object create(byte[] bytes, int offset) { diff --git serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java index 66d47d7..206bf1c 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hive.serde2.objectinspector.primitive; import java.sql.Date; -import java.sql.Timestamp; import java.time.ZoneId; import org.apache.hadoop.hive.common.type.HiveChar; @@ -27,6 +26,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.serde2.ByteStream; import org.apache.hadoop.hive.serde2.lazy.LazyInteger; @@ -277,7 +277,7 @@ public TimestampConverter(PrimitiveObjectInspector inputOI, SettableTimestampObjectInspector outputOI) { this.inputOI = inputOI; this.outputOI = outputOI; - r = outputOI.create(new Timestamp(0)); + r = outputOI.create(new Timestamp()); } public void setIntToTimestampInSeconds(boolean intToTimestampInSeconds) { diff --git serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java index 024a8dd..da3b6fb 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java @@ -23,7 +23,6 @@ import java.io.IOException; import java.nio.charset.CharacterCodingException; import java.sql.Date; -import java.sql.Timestamp; import 
java.time.DateTimeException; import java.time.ZoneId; import java.util.HashMap; @@ -31,15 +30,15 @@ import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; -import org.apache.hadoop.hive.common.type.TimestampTZ; -import org.apache.hadoop.hive.common.type.TimestampTZUtil; -import org.apache.hadoop.hive.ql.util.TimestampUtils; -import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; -import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.apache.hadoop.hive.common.type.TimestampTZ; +import org.apache.hadoop.hive.common.type.TimestampTZUtil; +import org.apache.hadoop.hive.ql.util.TimestampUtils; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -50,6 +49,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; +import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.LazyInteger; import org.apache.hadoop.hive.serde2.lazy.LazyLong; @@ -1057,7 +1057,7 @@ public static Date getDate(Object o, PrimitiveObjectInspector oi) { } catch (IllegalArgumentException e) { Timestamp ts = getTimestampFromString(s); if (ts != null) { - result = new Date(ts.getTime()); + result = new Date(ts.getMillis()); } else { result = null; } @@ -1071,7 +1071,7 @@ public static Date getDate(Object o, PrimitiveObjectInspector oi) { } catch (IllegalArgumentException e) { Timestamp ts = getTimestampFromString(val); if (ts != null) { - result = new Date(ts.getTime()); + result = new Date(ts.getMillis()); } else { result = null; } @@ -1143,8 +1143,8 @@ public static Timestamp getTimestamp(Object o, PrimitiveObjectInspector inputOI, result = TimestampUtils.doubleToTimestamp(((DoubleObjectInspector) inputOI).get(o)); break; case DECIMAL: - result = TimestampUtils.decimalToTimestamp(((HiveDecimalObjectInspector) inputOI) - .getPrimitiveJavaObject(o)); + result = TimestampUtils.decimalToTimestamp( + ((HiveDecimalObjectInspector) inputOI).getPrimitiveJavaObject(o)); break; case STRING: StringObjectInspector soi = (StringObjectInspector) inputOI; @@ -1156,7 +1156,7 @@ public static Timestamp getTimestamp(Object o, PrimitiveObjectInspector inputOI, result = getTimestampFromString(getString(o, inputOI)); break; case DATE: - result = new Timestamp( + result = Timestamp.ofEpochMilli( ((DateObjectInspector) inputOI).getPrimitiveWritableObject(o).get().getTime()); break; case TIMESTAMP: @@ -1192,7 +1192,7 @@ static Timestamp getTimestampFromString(String s) { } catch (IllegalArgumentException e) { // Let's try to parse it as timestamp with time zone and transform try { - result = Timestamp.from(TimestampTZUtil.parse(s).getZonedDateTime().toInstant()); + result = new Timestamp(TimestampTZUtil.parse(s).getZonedDateTime().toLocalDateTime()); } catch (DateTimeException e2) { result = null; } diff --git 
serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java index 8c84096..8fb86b3 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.TimestampWritable; diff --git serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java index 66e8a40..bacdde9 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java @@ -17,10 +17,9 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Timestamp; - import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; diff --git serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java index 5e24034..5ded38c 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; diff --git serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java index 66e3a96..d5efdfe 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java +++ serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hive.serde2; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -31,6 +30,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; diff --git serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java index 2442fca..6fa2a09 100644 --- 
serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java +++ serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -34,6 +33,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.fast.DeserializeRead; import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion; import org.apache.hadoop.hive.serde2.fast.SerializeWrite; diff --git serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java index 5302819..94f8b05 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java +++ serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.serde2.binarysortable; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.List; import java.util.Random; @@ -29,6 +28,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; diff --git serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java index 18b2032..331e7d0 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java +++ serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java @@ -18,25 +18,20 @@ package org.apache.hadoop.hive.serde2.binarysortable; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.List; import java.util.Random; -import junit.framework.TestCase; - -import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.hive.common.type.HiveBaseChar; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.common.type.RandomTypeUtil; -import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; @@ -46,12 +41,12 @@ import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; -import org.apache.hadoop.io.Writable; import org.apache.hive.common.util.DateUtils; +import junit.framework.TestCase; + // Just the primitive types. public class MyTestPrimitiveClass { diff --git serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java index 3c483cc..3952772 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java +++ serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java @@ -26,7 +26,6 @@ import java.io.DataOutputStream; import java.io.IOException; import java.math.BigDecimal; -import java.sql.Timestamp; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; @@ -40,6 +39,7 @@ import static org.junit.Assert.*; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableUtils; @@ -68,10 +68,10 @@ protected DateFormat initialValue() { private static long getSeconds(Timestamp ts) { // To compute seconds, we first subtract the milliseconds stored in the nanos field of the // Timestamp from the result of getTime(). - long seconds = (ts.getTime() - ts.getNanos() / 1000000) / 1000; + long seconds = (ts.getMillis() - ts.getNanos() / 1000000) / 1000; // It should also be possible to calculate this based on ts.getTime() only. 
- assertEquals(seconds, TimestampUtils.millisToSeconds(ts.getTime())); + assertEquals(seconds, TimestampUtils.millisToSeconds(ts.getMillis())); return seconds; } @@ -181,9 +181,9 @@ private static TimestampWritable serializeDeserializeAndCheckTimestamp(Timestamp fromBinSort.setBinarySortable(binarySortableBytes, binarySortableOffset); assertTSWEquals(tsw, fromBinSort); - long timeSeconds = ts.getTime() / 1000; + long timeSeconds = ts.getSeconds(); if (0 <= timeSeconds && timeSeconds <= Integer.MAX_VALUE) { - assertEquals(new Timestamp(timeSeconds * 1000), + assertEquals(Timestamp.ofEpochSecond(timeSeconds), fromIntAndVInts((int) timeSeconds, 0).getTimestamp()); int nanos = reverseNanos(ts.getNanos()); @@ -247,7 +247,7 @@ private static void checkTimestampWithAndWithoutNanos(Timestamp ts, int nanos) throws IOException { serializeDeserializeAndCheckTimestamp(ts); - ts.setNanos(nanos); + ts = ts.withNanos(nanos); assertEquals(serializeDeserializeAndCheckTimestamp(ts).getNanos(), nanos); } @@ -288,7 +288,7 @@ public void testTimestampsWithinPositiveIntRange() throws IOException { Random rand = new Random(294722773L); for (int i = 0; i < 10000; ++i) { long millis = ((long) rand.nextInt(Integer.MAX_VALUE)) * 1000; - checkTimestampWithAndWithoutNanos(new Timestamp(millis), randomNanos(rand)); + checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(millis), randomNanos(rand)); } } @@ -306,7 +306,7 @@ public void testTimestampsOutsidePositiveIntRange() throws IOException { Random rand = new Random(789149717L); for (int i = 0; i < 10000; ++i) { long millis = randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand); - checkTimestampWithAndWithoutNanos(new Timestamp(millis), randomNanos(rand)); + checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(millis), randomNanos(rand)); } } @@ -315,7 +315,7 @@ public void testTimestampsOutsidePositiveIntRange() throws IOException { public void testTimestampsInFullRange() throws IOException { Random rand = new Random(2904974913L); for (int i = 0; i < 10000; ++i) { - checkTimestampWithAndWithoutNanos(new Timestamp(rand.nextLong()), randomNanos(rand)); + checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(rand.nextLong()), randomNanos(rand)); } } @@ -326,9 +326,8 @@ public void testToFromDouble() { for (int nanosPrecision = 0; nanosPrecision <= 4; ++nanosPrecision) { for (int i = 0; i < 10000; ++i) { long millis = randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand); - Timestamp ts = new Timestamp(millis); int nanos = randomNanos(rand, nanosPrecision); - ts.setNanos(nanos); + Timestamp ts = Timestamp.ofEpochMilli(millis, nanos); TimestampWritable tsw = new TimestampWritable(ts); double asDouble = tsw.getDouble(); int recoveredNanos = @@ -356,9 +355,9 @@ private static HiveDecimal timestampToDecimal(Timestamp ts) { public void testDecimalToTimestampRandomly() { Random rand = new Random(294729777L); for (int i = 0; i < 10000; ++i) { - Timestamp ts = new Timestamp( + Timestamp ts = Timestamp.ofEpochMilli( randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand)); - ts.setNanos(randomNanos(rand, 9)); // full precision + ts = ts.withNanos(randomNanos(rand, 9)); // full precision assertEquals(ts, TimestampUtils.decimalToTimestamp(timestampToDecimal(ts))); } } @@ -367,10 +366,10 @@ public void testDecimalToTimestampRandomly() { @Concurrent(count=4) @Repeating(repetition=100) public void testDecimalToTimestampCornerCases() { - Timestamp ts = new Timestamp(parseToMillis("1969-03-04 05:44:33")); - 
assertEquals(0, ts.getTime() % 1000); + Timestamp ts = Timestamp.ofEpochMilli(parseToMillis("1969-03-04 05:44:33")); + assertEquals(0, ts.getMillis() % 1000); for (int nanos : new int[] { 100000, 900000, 999100000, 999900000 }) { - ts.setNanos(nanos); + ts = ts.withNanos(nanos); HiveDecimal d = timestampToDecimal(ts); assertEquals(ts, TimestampUtils.decimalToTimestamp(d)); assertEquals(ts, TimestampUtils.doubleToTimestamp(d.bigDecimalValue().doubleValue())); @@ -473,8 +472,7 @@ public void testBinarySortable() { Random rand = new Random(5972977L); List tswList = new ArrayList(); for (int i = 0; i < 50; ++i) { - Timestamp ts = new Timestamp(rand.nextLong()); - ts.setNanos(randomNanos(rand)); + Timestamp ts = Timestamp.ofEpochMilli(rand.nextLong(), randomNanos(rand)); tswList.add(new TimestampWritable(ts)); } for (TimestampWritable tsw1 : tswList) { @@ -509,10 +507,10 @@ public void testSetTimestamp() { } private static void verifySetTimestamp(long time) { - Timestamp t1 = new Timestamp(time); + Timestamp t1 = Timestamp.ofEpochMilli(time); TimestampWritable writable = new TimestampWritable(t1); byte[] bytes = writable.getBytes(); - Timestamp t2 = new Timestamp(0); + Timestamp t2 = new Timestamp(); TimestampWritable.setTimestamp(t2, bytes, 0); assertEquals(t1, t2); } diff --git serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java index 1e06049..3974859 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java +++ serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.serde2.lazybinary; import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -31,10 +30,10 @@ import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass; import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct; import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass; -import org.apache.hadoop.hive.serde2.binarysortable.TestBinarySortableSerDe; import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo; /** diff --git serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java index 5449a5f..ddb4c21 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java +++ serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.serde2.lazybinary; import java.sql.Date; -import java.sql.Timestamp; import java.util.Random; import org.apache.hadoop.hive.common.type.HiveChar; @@ -27,7 +26,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.common.type.RandomTypeUtil; -import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct; import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass; import 
org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo; diff --git storage-api/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java storage-api/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java index eeb3359..7811c19 100644 --- storage-api/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java +++ storage-api/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.common.type; import java.sql.Date; -import java.sql.Timestamp; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; diff --git storage-api/src/java/org/apache/hadoop/hive/common/type/Timestamp.java storage-api/src/java/org/apache/hadoop/hive/common/type/Timestamp.java new file mode 100644 index 0000000..649f9b6 --- /dev/null +++ storage-api/src/java/org/apache/hadoop/hive/common/type/Timestamp.java @@ -0,0 +1,189 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.common.type; + +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.format.DateTimeParseException; +import java.time.temporal.ChronoField; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * This is the internal type for Timestamp. + * The fully qualified input format of a Timestamp is + * "yyyy-MM-dd HH:mm:ss[.SSS...]", where the time part is optional. + * If the time part is absent, a default '00:00:00.0' will be used. + */ +public class Timestamp implements Comparable<Timestamp> { + + private static final LocalDateTime EPOCH = LocalDateTime.of(1970, 1, 1, 0, 0, 0); + private static final Pattern SINGLE_DIGIT_PATTERN = Pattern.compile("[\\+-]\\d:\\d\\d"); + private static final DateTimeFormatter FORMATTER; + static { + DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder(); + // Date part + builder.append(DateTimeFormatter.ofPattern("yyyy-MM-dd")); + // Time part + builder.optionalStart(). + appendLiteral(" ").append(DateTimeFormatter.ofPattern("HH:mm:ss")). + optionalStart().appendFraction(ChronoField.NANO_OF_SECOND, 1, 9, true).optionalEnd() + .optionalEnd(); + FORMATTER = builder.toFormatter(); + } + + private LocalDateTime localDateTime; + + public Timestamp() { + this(EPOCH); + } + + public Timestamp(LocalDateTime localDateTime) { + setLocalDateTime(localDateTime); + } + +// public Timestamp(long seconds, int nanos) { +// set(seconds, nanos); +// } +// +// /** +// * Obtains an instance of Instant using seconds from the epoch of 1970-01-01T00:00:00Z and +// * nanosecond fraction of second. Then, it creates a zoned date-time with the same instant +// * as that specified but in the given time-zone. +// */ +// public void set(long seconds, int nanos) { +// setLocalDateTime(LocalDateTime.ofEpochSecond(seconds, nanos, ZoneOffset.UTC)); +// } + + public LocalDateTime getLocalDateTime() { + return localDateTime; + } + + public void setLocalDateTime(LocalDateTime localDateTime) { + this.localDateTime = localDateTime != null ?
localDateTime : EPOCH; + } + + @Override + public String toString() { + return localDateTime.format(FORMATTER); + } + + public int hashCode() { + return localDateTime.hashCode(); + } + + @Override + public boolean equals(Object other) { + if (other instanceof Timestamp) { + return compareTo((Timestamp) other) == 0; + } + return false; + } + + @Override + public int compareTo(Timestamp o) { + return localDateTime.compareTo(o.localDateTime); + } + + public long getSeconds() { + return localDateTime.toEpochSecond(ZoneOffset.UTC); + } + + public void setTimeInSeconds(long epochSecond) { + setTimeInSeconds(epochSecond, 0); + } + + public void setTimeInSeconds(long epochSecond, int nanos) { + localDateTime = LocalDateTime.ofEpochSecond( + epochSecond, nanos, ZoneOffset.UTC); + } + + public long getMillis() { + return localDateTime.toInstant(ZoneOffset.UTC).toEpochMilli(); + } + + public void setTimeInMillis(long epochMilli) { + localDateTime = LocalDateTime.ofInstant( + Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC); + } + + public void setTimeInMillis(long epochMilli, int nanos) { + localDateTime = LocalDateTime.ofInstant( + Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC).withNano(nanos); + } + + public int getNanos() { + return localDateTime.getNano(); + } + + public static Timestamp valueOf(String s) { + // need to handle offset with single digit hour, see JDK-8066806 + s = handleSingleDigitHourOffset(s); + LocalDateTime localDateTime; + try { + localDateTime = LocalDateTime.parse(s, FORMATTER); + } catch (DateTimeParseException e) { + throw new IllegalArgumentException("Cannot create timestamp, parsing error"); + } + return new Timestamp(localDateTime); + } + + private static String handleSingleDigitHourOffset(String s) { + Matcher matcher = SINGLE_DIGIT_PATTERN.matcher(s); + if (matcher.find()) { + int index = matcher.start() + 1; + s = s.substring(0, index) + "0" + s.substring(index, s.length()); + } + return s; + } + + public static Timestamp ofEpochSecond(long epochSecond) { + return ofEpochSecond(epochSecond, 0); + } + + public static Timestamp ofEpochSecond(long epochSecond, int nanos) { + return new Timestamp( + LocalDateTime.ofEpochSecond(epochSecond, nanos, ZoneOffset.UTC)); + } + + public static Timestamp ofEpochMilli(long epochMilli) { + return ofEpochMilli(epochMilli, 0); + } + + public static Timestamp ofEpochMilli(long epochMilli, int nanos) { + return new Timestamp( + LocalDateTime.ofInstant( + Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC).withNano(nanos)); + } + + public Timestamp withNanos(int nanos) { + return new Timestamp(localDateTime.withNano(nanos)); + } + + /** + * Return a copy of this object. + */ + public Object clone() { + // LocalDateTime is immutable.
+ return new Timestamp(this.localDateTime); + } + +} diff --git storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java index ef1c817..45f5b3c 100644 --- storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java +++ storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hive.ql.exec.vector; -import java.sql.Timestamp; import java.util.Arrays; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.io.Writable; /** @@ -72,7 +72,7 @@ public TimestampColumnVector(int len) { time = new long[len]; nanos = new int[len]; - scratchTimestamp = new Timestamp(0); + scratchTimestamp = new Timestamp(); scratchWritable = null; // Allocated by caller. } @@ -112,8 +112,7 @@ public int getNanos(int elementNum) { * @param elementNum */ public void timestampUpdate(Timestamp timestamp, int elementNum) { - timestamp.setTime(time[elementNum]); - timestamp.setNanos(nanos[elementNum]); + timestamp.setTimeInMillis(time[elementNum], nanos[elementNum]); } /** @@ -123,8 +122,7 @@ public void timestampUpdate(Timestamp timestamp, int elementNum) { * @return */ public Timestamp asScratchTimestamp(int elementNum) { - scratchTimestamp.setTime(time[elementNum]); - scratchTimestamp.setNanos(nanos[elementNum]); + scratchTimestamp.setTimeInMillis(time[elementNum], nanos[elementNum]); return scratchTimestamp; } @@ -142,8 +140,7 @@ public Timestamp getScratchTimestamp() { * @return */ public long getTimestampAsLong(int elementNum) { - scratchTimestamp.setTime(time[elementNum]); - scratchTimestamp.setNanos(nanos[elementNum]); + scratchTimestamp.setTimeInMillis(time[elementNum], nanos[elementNum]); return getTimestampAsLong(scratchTimestamp); } @@ -153,30 +150,17 @@ public long getTimestampAsLong(int elementNum) { * @return */ public static long getTimestampAsLong(Timestamp timestamp) { - return millisToSeconds(timestamp.getTime()); + return timestamp.getSeconds(); } // Copy of TimestampWritable.millisToSeconds /** - * Rounds the number of milliseconds relative to the epoch down to the nearest whole number of - * seconds. 500 would round to 0, -500 would round to -1. - */ - private static long millisToSeconds(long millis) { - if (millis >= 0) { - return millis / 1000; - } else { - return (millis - 999) / 1000; - } - } - - /** * Return a double representation of a Timestamp. * @param elementNum * @return */ public double getDouble(int elementNum) { - scratchTimestamp.setTime(time[elementNum]); - scratchTimestamp.setNanos(nanos[elementNum]); + scratchTimestamp.setTimeInMillis(time[elementNum], nanos[elementNum]); return getDouble(scratchTimestamp); } @@ -188,7 +172,7 @@ public double getDouble(int elementNum) { public static double getDouble(Timestamp timestamp) { // Same algorithm as TimestampWritable (not currently import-able here). 
double seconds, nanos; - seconds = millisToSeconds(timestamp.getTime()); + seconds = timestamp.getSeconds(); nanos = timestamp.getNanos(); return seconds + nanos / 1000000000; } @@ -285,7 +269,7 @@ public void set(int elementNum, Timestamp timestamp) { this.noNulls = false; this.isNull[elementNum] = true; } else { - this.time[elementNum] = timestamp.getTime(); + this.time[elementNum] = timestamp.getMillis(); this.nanos[elementNum] = timestamp.getNanos(); } } @@ -295,7 +279,7 @@ public void set(int elementNum, Timestamp timestamp) { * @param elementNum */ public void setFromScratchTimestamp(int elementNum) { - this.time[elementNum] = scratchTimestamp.getTime(); + this.time[elementNum] = scratchTimestamp.getMillis(); this.nanos[elementNum] = scratchTimestamp.getNanos(); } @@ -363,7 +347,7 @@ public void copySelected( public void fill(Timestamp timestamp) { noNulls = true; isRepeating = true; - time[0] = timestamp.getTime(); + time[0] = timestamp.getMillis(); nanos[0] = timestamp.getNanos(); } @@ -390,8 +374,7 @@ public void stringifyValue(StringBuilder buffer, int row) { row = 0; } if (noNulls || !isNull[row]) { - scratchTimestamp.setTime(time[row]); - scratchTimestamp.setNanos(nanos[row]); + scratchTimestamp.setTimeInMillis(time[row], nanos[row]); buffer.append(scratchTimestamp.toString()); } else { buffer.append("null"); diff --git storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java index dfc7272..f79ddd8 100644 --- storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java +++ storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java @@ -18,15 +18,15 @@ package org.apache.hadoop.hive.ql.util; +import java.math.BigDecimal; + import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveDecimalV1; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; -import java.math.BigDecimal; -import java.sql.Timestamp; - /** - * Utitilities for Timestamps and the relevant conversions. + * Utilities for Timestamps and the relevant conversions. 
*/ public class TimestampUtils { public static final BigDecimal BILLION_BIG_DECIMAL = BigDecimal.valueOf(1000000000); @@ -36,7 +36,7 @@ * @return double representation of the timestamp, accurate to nanoseconds */ public static double getDouble(Timestamp ts) { - long seconds = millisToSeconds(ts.getTime()); + long seconds = ts.getSeconds(); return seconds + ((double) ts.getNanos()) / 1000000000; } @@ -52,17 +52,7 @@ public static Timestamp doubleToTimestamp(double f) { bd = bd.subtract(new BigDecimal(seconds)).multiply(new BigDecimal(1000000000)); int nanos = bd.intValue(); - // Convert to millis - long millis = seconds * 1000; - if (nanos < 0) { - millis -= 1000; - nanos += 1000000000; - } - Timestamp t = new Timestamp(millis); - - // Set remaining fractional portion to nanos - t.setNanos(nanos); - return t; + return nanos < 0 ? Timestamp.ofEpochSecond(seconds - 1, nanos + 1000000000) : Timestamp.ofEpochSecond(seconds, nanos); } catch (NumberFormatException nfe) { return null; } catch (IllegalArgumentException iae) { @@ -99,9 +89,7 @@ public static Timestamp decimalToTimestamp(HiveDecimal dec) { return null; } long seconds = nanoInstant.longValue(); - Timestamp t = new Timestamp(seconds * 1000); - t.setNanos(nanos); - return t; + return Timestamp.ofEpochSecond(seconds, nanos); } /** @@ -142,9 +130,7 @@ public static Timestamp decimalToTimestamp( } long seconds = nanoInstant.longValue(); - Timestamp timestamp = new Timestamp(seconds * 1000L); - timestamp.setNanos(nanos); - return timestamp; + return Timestamp.ofEpochSecond(seconds, nanos); } public static Timestamp decimalToTimestamp(HiveDecimalV1 dec) { @@ -156,10 +142,8 @@ public static Timestamp decimalToTimestamp(HiveDecimalV1 dec) { } long seconds = nanoInstant.subtract(new BigDecimal(nanos)).divide(BILLION_BIG_DECIMAL).longValue(); - Timestamp t = new Timestamp(seconds * 1000); - t.setNanos(nanos); - return t; + return Timestamp.ofEpochSecond(seconds, nanos); } catch (NumberFormatException nfe) { return null; } catch (IllegalArgumentException iae) { diff --git storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java index d11f41c..d180593 100644 --- storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java +++ storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java @@ -17,9 +17,10 @@ */ package org.apache.hadoop.hive.common.type; -import java.sql.Timestamp; -import java.util.Random; -import java.util.Arrays; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; @@ -27,16 +28,15 @@ import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; +import java.util.Arrays; +import java.util.Random; -import org.apache.hadoop.hive.serde2.io.HiveDecimalWritableV1; -import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; -import org.apache.hadoop.hive.common.type.RandomTypeUtil; import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; import org.apache.hadoop.hive.ql.util.TimestampUtils; - -import org.junit.*; - -import static org.junit.Assert.*; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritableV1; +import org.junit.Assert; +import org.junit.Test; public class TestHiveDecimal extends HiveDecimalTestBase { diff --git 
storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java index c175ed0..6c8cb23 100644 --- storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java +++ storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java @@ -18,15 +18,16 @@ package org.apache.hadoop.hive.ql.exec.vector; -import org.junit.Test; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.sql.Timestamp; +import java.time.LocalDateTime; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.junit.Test; /** * Test for StructColumnVector @@ -107,11 +108,11 @@ public void testStringify() throws IOException { batch.cols[0] = x; batch.cols[1] = y; batch.reset(); - Timestamp ts = Timestamp.valueOf("2000-01-01 00:00:00"); + Timestamp ts = new Timestamp(LocalDateTime.of(2000, 1, 1, 0, 0, 0)); for(int r=0; r < 10; ++r) { batch.size += 1; x1.vector[r] = 3 * r; - ts.setTime(ts.getTime() + 1000); + ts.setTimeInMillis(ts.getMillis() + 1000); x2.set(r, ts); byte[] buffer = ("value " + r).getBytes(StandardCharsets.UTF_8); y.setRef(r, buffer, 0, buffer.length); diff --git storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampColumnVector.java storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampColumnVector.java index 6e5d5c8..1cced74 100644 --- storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampColumnVector.java +++ storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampColumnVector.java @@ -23,11 +23,11 @@ import java.io.PrintWriter; import java.math.BigDecimal; import java.math.RoundingMode; -import java.sql.Timestamp; import java.util.Date; import java.util.Random; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import static org.junit.Assert.*; diff --git vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java index 51ff0cc..8cc3390 100644 --- vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java +++ vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java @@ -1564,7 +1564,7 @@ private void generateFilterColumnBetweenDynamicValue(String[] tdesc) throws Exce // Special case - Date requires its own specific BetweenDynamicValue class, but derives from FilterLongColumnBetween typeName = "Long"; } else if (operandType.equals("timestamp")) { - defaultValue = "new Timestamp(0)"; + defaultValue = "new Timestamp()"; vectorType = "Timestamp"; getPrimitiveMethod = "getTimestamp"; getValueMethod = "";
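Editor's note (illustrative, not part of the patch): the short program below sketches how the new org.apache.hadoop.hive.common.type.Timestamp and the reworked TimestampColumnVector are intended to be used together. It assumes the patched storage-api classes are on the classpath and that the nanos arguments of ofEpochMilli/setTimeInMillis/withNanos denote the full nano-of-second, replacing the sub-second part, which is what the tests above rely on. The class name TimestampSketch and the sample values are invented for illustration.

import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

public class TimestampSketch {
  public static void main(String[] args) {
    // Epoch-based construction, always interpreted in UTC.
    Timestamp t = Timestamp.ofEpochMilli(1500L, 123456789);
    System.out.println(t.getSeconds());  // 1    (floored epoch seconds)
    System.out.println(t.getMillis());   // 1123 (1s plus 123ms taken from the nano-of-second)
    System.out.println(t.getNanos());    // 123456789

    // Parsing accepts "yyyy-MM-dd[ HH:mm:ss[.f...]]"; the time part is optional.
    Timestamp parsed = Timestamp.valueOf("2001-02-03 04:05:06.7");
    System.out.println(parsed);

    // TimestampColumnVector keeps epoch millis and nano-of-second in parallel arrays,
    // so a value round-trips through set()/asScratchTimestamp() unchanged.
    TimestampColumnVector col = new TimestampColumnVector(1);
    col.set(0, t);
    System.out.println(col.asScratchTimestamp(0).equals(t));  // true
  }
}

Backing Timestamp with a zone-less LocalDateTime read in UTC keeps getSeconds()/getMillis() as plain epoch arithmetic and removes the java.sql.Timestamp dependency from storage-api.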