diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java index 926f5720ac..d38447f3db 100644 --- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java +++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java @@ -22,8 +22,7 @@ import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; -import java.sql.Date; -import java.sql.Timestamp; +import java.time.LocalDateTime; import java.util.Map.Entry; import org.apache.accumulo.core.client.BatchWriter; @@ -39,9 +38,11 @@ import org.apache.hadoop.hive.accumulo.AccumuloHiveConstants; import org.apache.hadoop.hive.accumulo.AccumuloHiveRow; import org.apache.hadoop.hive.accumulo.serde.AccumuloSerDeParameters; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.ByteStream; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -234,7 +235,7 @@ public void testBinaryTypes() throws Exception { // date baos.reset(); - Date now = new Date(System.currentTimeMillis()); + Date now = Date.ofEpochMilli(System.currentTimeMillis()); DateWritable dateWritable = new DateWritable(now); Date dateValue = dateWritable.get(); dateWritable.write(out); @@ -242,9 +243,9 @@ public void testBinaryTypes() throws Exception { // tiemestamp baos.reset(); - Timestamp timestampValue = new Timestamp(now.getTime()); + Timestamp timestampValue = new Timestamp(LocalDateTime.now()); ByteStream.Output output = new ByteStream.Output(); - TimestampWritable timestampWritable = new TimestampWritable(new Timestamp(now.getTime())); + TimestampWritable timestampWritable = new TimestampWritable(new Timestamp(LocalDateTime.now())); timestampWritable.write(new DataOutputStream(output)); output.close(); m.put(cfBytes, "timestamp".getBytes(), output.toByteArray()); @@ -587,7 +588,7 @@ public void testUtf8Types() throws Exception { m.put(cfBytes, "decimal".getBytes(), baos.toByteArray()); // date - Date now = new Date(System.currentTimeMillis()); + Date now = Date.ofEpochMilli(System.currentTimeMillis()); DateWritable dateWritable = new DateWritable(now); Date dateValue = dateWritable.get(); baos.reset(); @@ -598,7 +599,7 @@ public void testUtf8Types() throws Exception { m.put(cfBytes, "date".getBytes(), baos.toByteArray()); // timestamp - Timestamp timestampValue = new Timestamp(now.getTime()); + Timestamp timestampValue = new Timestamp(LocalDateTime.now()); baos.reset(); JavaTimestampObjectInspector timestampOI = (JavaTimestampObjectInspector) PrimitiveObjectInspectorFactory .getPrimitiveJavaObjectInspector(TypeInfoFactory diff --git a/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java b/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java index 90ffddba0d..213650c2a5 100644 --- a/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java +++ b/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hive.common.type; -import java.sql.Timestamp; import java.text.DateFormat; import java.text.SimpleDateFormat; import 
java.time.DateTimeException; @@ -31,7 +30,6 @@ import java.time.format.TextStyle; import java.time.temporal.ChronoField; import java.time.temporal.TemporalAccessor; -import java.util.Date; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -45,9 +43,6 @@ private static final LocalTime DEFAULT_LOCAL_TIME = LocalTime.of(0, 0); private static final Pattern SINGLE_DIGIT_PATTERN = Pattern.compile("[\\+-]\\d:\\d\\d"); - private static final ThreadLocal CONVERT_FORMATTER = - ThreadLocal.withInitial(() -> new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")); - static final DateTimeFormatter FORMATTER; static { DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder(); @@ -126,11 +121,14 @@ public static TimestampTZ parseOrNull(String s, ZoneId defaultTimeZone) { } } - // Converts Date to TimestampTZ. The conversion is done text-wise since - // Date/Timestamp should be treated as description of date/time. + // Converts Date to TimestampTZ. public static TimestampTZ convert(Date date, ZoneId defaultTimeZone) { - String s = date instanceof Timestamp ? date.toString() : CONVERT_FORMATTER.get().format(date); - return parse(s, defaultTimeZone); + return parse(date.toString(), defaultTimeZone); + } + + // Converts Timestamp to TimestampTZ. + public static TimestampTZ convert(Timestamp ts, ZoneId defaultTimeZone) { + return parse(ts.toString(), defaultTimeZone); } public static ZoneId parseTimeZone(String timeZoneStr) { diff --git a/common/src/java/org/apache/hive/common/util/DateParser.java b/common/src/java/org/apache/hive/common/util/DateParser.java index 949fdbafcf..c466b1995b 100644 --- a/common/src/java/org/apache/hive/common/util/DateParser.java +++ b/common/src/java/org/apache/hive/common/util/DateParser.java @@ -17,24 +17,25 @@ */ package org.apache.hive.common.util; -import java.sql.Date; +import org.apache.hadoop.hive.common.type.Date; + import java.text.ParsePosition; import java.text.SimpleDateFormat; +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.format.DateTimeParseException; /** * Date parser class for Hive. 
*/ public class DateParser { - private final SimpleDateFormat formatter; - private final ParsePosition pos; + public DateParser() { - formatter = new SimpleDateFormat("yyyy-MM-dd"); - // TODO: ideally, we should set formatter.setLenient(false); - pos = new ParsePosition(0); - } + } public Date parseDate(String strValue) { - Date result = new Date(0); + Date result = new Date(); if (parseDate(strValue, result)) { return result; } @@ -42,12 +43,11 @@ public Date parseDate(String strValue) { } public boolean parseDate(String strValue, Date result) { - pos.setIndex(0); - java.util.Date parsedVal = formatter.parse(strValue, pos); + Date parsedVal = Date.valueOf(strValue); if (parsedVal == null) { return false; } - result.setTime(parsedVal.getTime()); + result.setTimeInMillis(parsedVal.getMillis()); return true; } } diff --git a/common/src/java/org/apache/hive/common/util/TimestampParser.java b/common/src/java/org/apache/hive/common/util/TimestampParser.java index f674b5d30b..d9e8913855 100644 --- a/common/src/java/org/apache/hive/common/util/TimestampParser.java +++ b/common/src/java/org/apache/hive/common/util/TimestampParser.java @@ -18,19 +18,17 @@ package org.apache.hive.common.util; -import java.math.BigDecimal; -import java.sql.Timestamp; import java.util.Arrays; -import java.util.Iterator; import java.util.List; import java.util.Optional; import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.apache.hadoop.hive.common.type.Timestamp; import org.joda.time.DateTime; -import org.joda.time.IllegalInstantException; import org.joda.time.MutableDateTime; import org.joda.time.DateTimeFieldType; +import org.joda.time.chrono.ISOChronology; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.DateTimeFormatterBuilder; @@ -52,19 +50,8 @@ protected final static String[] stringArray = new String[] {}; protected final static String millisFormatString = "millis"; - @Nullable - private final static DateTime startingDateValue = makeStartingDateValue(); - - @Nullable - private static DateTime makeStartingDateValue() { - try { - return new DateTime(1970, 1, 1, 0, 0, 0, 0); - } catch (IllegalInstantException e) { - // 1970-01-01 00:00:00 did not exist in some zones. In these zones, we need to take different, - // less optimal parsing route. - return null; - } - } + protected final static DateTime startingDateValue = + new DateTime(1970, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()); protected String[] formatStrings = null; protected DateTimeFormatter fmt = null; @@ -126,21 +113,22 @@ public Timestamp parseTimestamp(String strValue) throws IllegalArgumentException if (startingDateValue != null) { // reset value in case any date fields are missing from the date pattern - MutableDateTime mdt = new MutableDateTime(startingDateValue); + MutableDateTime mdt = new MutableDateTime( + startingDateValue, ISOChronology.getInstanceUTC()); // Using parseInto() avoids throwing exception when parsing, // allowing fallback to default timestamp parsing if custom patterns fail. 
int ret = fmt.parseInto(mdt, strValue, 0); // Only accept parse results if we parsed the entire string if (ret == strValue.length()) { - return Optional.of(new Timestamp(mdt.getMillis())); + return Optional.of(Timestamp.ofEpochMilli(mdt.getMillis())); } return Optional.empty(); } try { DateTime dt = fmt.parseDateTime(strValue); - return Optional.of(new Timestamp(dt.getMillis())); + return Optional.of(Timestamp.ofEpochMilli(dt.getMillis())); } catch (IllegalArgumentException e) { return Optional.empty(); } @@ -181,7 +169,8 @@ public int parseInto(DateTimeParserBucket bucket, String text, int position) { // Joda DateTime only has precision to millis, cut off any fractional portion long millis = Long.parseLong(matcher.group(1)); - DateTime dt = new DateTime(millis); + DateTime dt = + new DateTime(millis, ISOChronology.getInstanceUTC()); for (DateTimeFieldType field : dateTimeFields) { bucket.saveField(field, dt.get(field)); } diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java b/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java index 5a3f0481bc..cd23abebfa 100644 --- a/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java +++ b/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java @@ -21,7 +21,6 @@ import org.junit.Assert; import org.junit.Test; -import java.sql.Timestamp; import java.time.ZoneId; import java.time.format.DateTimeParseException; import java.util.TimeZone; diff --git a/common/src/test/org/apache/hive/common/util/TestDateParser.java b/common/src/test/org/apache/hive/common/util/TestDateParser.java index 0553b3d387..3a3585124c 100644 --- a/common/src/test/org/apache/hive/common/util/TestDateParser.java +++ b/common/src/test/org/apache/hive/common/util/TestDateParser.java @@ -18,13 +18,13 @@ package org.apache.hive.common.util; import static org.junit.Assert.*; -import org.junit.Test; -import java.sql.Date; +import org.apache.hadoop.hive.common.type.Date; +import org.junit.Test; public class TestDateParser { DateParser parser = new DateParser(); - Date date = new Date(0); + Date date = new Date(); void checkValidCase(String strValue, Date expected) { Date dateValue = parser.parseDate(strValue); diff --git a/common/src/test/org/apache/hive/common/util/TestTimestampParser.java b/common/src/test/org/apache/hive/common/util/TestTimestampParser.java index c982af65c6..3cc474f82b 100644 --- a/common/src/test/org/apache/hive/common/util/TestTimestampParser.java +++ b/common/src/test/org/apache/hive/common/util/TestTimestampParser.java @@ -18,11 +18,10 @@ package org.apache.hive.common.util; -import java.sql.Timestamp; -import java.util.Arrays; -import java.util.List; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; -import static org.junit.Assert.*; +import org.apache.hadoop.hive.common.type.Timestamp; import org.junit.Test; public class TestTimestampParser { @@ -133,10 +132,10 @@ public void testMillisParser() { TimestampParser tp = new TimestampParser(patterns); ValidTimestampCase[] validCases = { - new ValidTimestampCase("0", new Timestamp(0)), - new ValidTimestampCase("-1000000", new Timestamp(-1000000)), - new ValidTimestampCase("1420509274123", new Timestamp(1420509274123L)), - new ValidTimestampCase("1420509274123.456789", new Timestamp(1420509274123L)), + new ValidTimestampCase("0", Timestamp.ofEpochMilli(0)), + new ValidTimestampCase("-1000000", Timestamp.ofEpochMilli(-1000000)), + new ValidTimestampCase("1420509274123", 
Timestamp.ofEpochMilli(1420509274123L)), + new ValidTimestampCase("1420509274123.456789", Timestamp.ofEpochMilli(1420509274123L)), // Other format pattern should also work new ValidTimestampCase("1945-12-31T23:59:59", diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java index d991adb088..8badb2dfc4 100644 --- a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java +++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.io.InputStream; -import java.sql.Timestamp; import java.time.Instant; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; @@ -38,6 +37,7 @@ import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.conf.Constants; import org.apache.hadoop.hive.conf.HiveConf; @@ -324,8 +324,7 @@ public Writable serialize(Object o, ObjectInspector objectInspector) throws SerD switch (types[i].getPrimitiveCategory()) { case TIMESTAMP: res = ((TimestampObjectInspector) fields.get(i).getFieldObjectInspector()) - .getPrimitiveJavaObject( - values.get(i)).getTime(); + .getPrimitiveJavaObject(values.get(i)).getMillis(); break; case TIMESTAMPLOCALTZ: res = ((TimestampLocalTZObjectInspector) fields.get(i).getFieldObjectInspector()) @@ -383,7 +382,7 @@ public Writable serialize(Object o, ObjectInspector objectInspector) throws SerD .equals(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME)); value.put(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME, ((TimestampObjectInspector) fields.get(granularityFieldIndex).getFieldObjectInspector()) - .getPrimitiveJavaObject(values.get(granularityFieldIndex)).getTime() + .getPrimitiveJavaObject(values.get(granularityFieldIndex)).getMillis() ); if (values.size() == columns.length + 2) { // Then partition number if any. 
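Note: the hunks above replace java.sql.Date/java.sql.Timestamp with Hive's own org.apache.hadoop.hive.common.type.Date/Timestamp and switch millisecond access from getTime() to getMillis(). The following is a minimal, hypothetical sketch of that migration pattern (not part of the patch); it assumes only the factory and accessor methods already used in these hunks (ofEpochMilli, ofEpochSecond, getMillis, setTimeInMillis) and that the hive-common types are on the classpath.

    // Hypothetical illustration of the java.sql -> hive.common.type migration
    // applied throughout this patch; not part of the patch itself.
    import org.apache.hadoop.hive.common.type.Date;
    import org.apache.hadoop.hive.common.type.Timestamp;

    public class TimestampMigrationSketch {
      public static void main(String[] args) {
        // Before: new java.sql.Timestamp(millis) and ts.getTime()
        // After:  a factory method plus getMillis(), as in DruidSerDe above.
        Timestamp ts = Timestamp.ofEpochMilli(System.currentTimeMillis());
        long millis = ts.getMillis();

        // Seconds-based construction, as used for the granularity column in TestDruidSerDe.
        Timestamp granularity = Timestamp.ofEpochSecond(1377907200L);

        // Dates move the same way: Date.ofEpochMilli(...) replaces new java.sql.Date(millis),
        // and setTimeInMillis(...) replaces setTime(...), as in the vectorized expression templates.
        Date d = Date.ofEpochMilli(millis);
        d.setTimeInMillis(0L);

        System.out.println(ts + " / " + granularity + " / " + d);
      }
    }
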
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java index e4fa1a2e59..ab3ae0bfeb 100644 --- a/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java +++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java @@ -27,7 +27,6 @@ import java.io.IOException; import java.io.InputStream; import java.lang.reflect.InvocationTargetException; -import java.sql.Timestamp; import java.time.Instant; import java.time.ZoneOffset; import java.util.ArrayList; @@ -35,20 +34,12 @@ import java.util.Map.Entry; import java.util.Properties; -import com.fasterxml.jackson.core.type.TypeReference; -import com.google.common.util.concurrent.SettableFuture; -import com.metamx.http.client.HttpClient; -import com.metamx.http.client.response.HttpResponseHandler; -import io.druid.data.input.Row; -import io.druid.query.Result; -import io.druid.query.select.SelectResultValue; -import io.druid.query.timeseries.TimeseriesResultValue; -import io.druid.query.topn.TopNResultValue; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.conf.Constants; import org.apache.hadoop.hive.druid.DruidStorageHandlerUtils; @@ -79,17 +70,25 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; - import org.junit.Before; import org.junit.Test; import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonMappingException; import com.google.common.base.Function; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import com.google.common.util.concurrent.SettableFuture; +import com.metamx.http.client.HttpClient; +import com.metamx.http.client.response.HttpResponseHandler; +import io.druid.data.input.Row; import io.druid.query.Query; +import io.druid.query.Result; +import io.druid.query.select.SelectResultValue; +import io.druid.query.timeseries.TimeseriesResultValue; +import io.druid.query.topn.TopNResultValue; /** * Basic tests for Druid SerDe. 
The examples are taken from Druid 0.9.1.1 @@ -778,7 +777,7 @@ private void deserializeQueryResults(DruidSerDe serDe, String queryType, String new IntWritable(1112123), new ShortWritable((short) 12), new ByteWritable((byte) 0), - new TimestampWritable(new Timestamp(1377907200000L)) // granularity + new TimestampWritable(Timestamp.ofEpochSecond(1377907200L)) // granularity }; private static final DruidWritable DRUID_WRITABLE = new DruidWritable( ImmutableMap.builder() diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java index bc4e1466f5..6ff37955c1 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java @@ -102,7 +102,7 @@ public Writable serialize(Object obj, ObjectInspector objInspector) throws Excep timestamp = ((LongObjectInspector)inspector).get(value); } else { PrimitiveObjectInspector primitive = (PrimitiveObjectInspector) inspector; - timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getTime(); + timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getMillis(); } } diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java index 05cc30a621..b2774654ac 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java @@ -296,7 +296,7 @@ static long getTimestampVal(IndexSearchCondition sc) throws IOException { timestamp = ((LongObjectInspector) inspector).get(value); } else { PrimitiveObjectInspector primitive = (PrimitiveObjectInspector) inspector; - timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getTime(); + timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getMillis(); } } catch (HiveException e) { throw new IOException(e); diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java index d94dbe8d8a..2aeaa33b9b 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java @@ -23,6 +23,7 @@ import java.util.List; import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping; import org.apache.hadoop.hive.hbase.struct.HBaseValueFactory; import org.apache.hadoop.hive.serde2.SerDeException; @@ -161,7 +162,8 @@ private Object uncheckedGetField(int fieldID) { } LazyObjectBase lz = fields[fieldID]; if (lz instanceof LazyTimestamp) { - ((LazyTimestamp) lz).getWritableObject().setTime(timestamp); + ((LazyTimestamp) lz).getWritableObject().set( + Timestamp.ofEpochMilli(timestamp)); } else { ((LazyLong) lz).getWritableObject().set(timestamp); } diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java index 114c205c83..af80c02b9e 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java @@ -21,8 +21,6 @@ import java.io.ByteArrayInputStream; import 
java.io.IOException; import java.nio.charset.CharacterCodingException; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -34,9 +32,11 @@ import java.util.regex.Pattern; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.AbstractSerDe; diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java index d2954e030a..2f868b5e32 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java @@ -22,7 +22,6 @@ import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; -import java.sql.Date; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; @@ -30,9 +29,11 @@ import java.util.Map; import java.util.Map.Entry; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; @@ -217,7 +218,7 @@ public static void writeDatum(DataOutput out, Object val) throws IOException { new DateWritable((Date)val).write(out); return; case DataType.TIMESTAMP: - new TimestampWritable((java.sql.Timestamp)val).write(out); + new TimestampWritable((Timestamp)val).write(out); return; default: throw new IOException("Unexpected data type " + type + diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java index d80b6d43fe..5ab026400b 100644 --- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java +++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java @@ -16,11 +16,11 @@ package org.apache.hive.benchmark.vectorization; -import java.sql.Timestamp; import java.util.Random; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; @@ -144,7 +144,7 @@ private static ColumnVector generateTimestampColumnVector(final boolean nulls, final boolean repeating, final int size, final Random rand) { Timestamp[] timestamps = new Timestamp[size]; for (int i = 0; i < size; i++) { - timestamps[i] = new Timestamp(rand.nextInt()); + timestamps[i] = Timestamp.ofEpochMilli(rand.nextInt()); } return generateTimestampColumnVector(nulls, repeating, size, rand, timestamps); } diff --git 
a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt index 0d3ee2b74c..2bbd9e4472 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt @@ -21,7 +21,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; -import java.sql.Date; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.serde2.io.DateWritable; /** diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt index be5f641291..44a8f19649 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Date; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt index bf1128a624..3574bf29a9 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Date; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; @@ -39,9 +39,9 @@ public class extends VectorExpression { private final int colNum1; private final int colNum2; - private transient final Date scratchDate1 = new Date(0); + private transient final Date scratchDate1 = new Date(); private transient final HiveIntervalYearMonth scratchIntervalYearMonth2 = new HiveIntervalYearMonth(); - private transient final Date outputDate = new Date(0); + private transient final Date outputDate = new Date(); private transient final DateTimeMath dtm = new DateTimeMath(); public (int colNum1, int colNum2, int outputColumnNum) { @@ -98,13 +98,13 @@ public class extends VectorExpression { * conditional checks in the inner loop. 
*/ if (inputColVector1.isRepeating && inputColVector2.isRepeating) { - scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0])); + scratchDate1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); scratchIntervalYearMonth2.set((int) vector2[0]); dtm.( scratchDate1, scratchIntervalYearMonth2, outputDate); outputVector[0] = DateWritable.dateToDays(outputDate); } else if (inputColVector1.isRepeating) { - scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0])); + scratchDate1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; @@ -125,7 +125,7 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchDate1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); scratchIntervalYearMonth2.set((int) vector2[i]); dtm.( scratchDate1, scratchIntervalYearMonth2, outputDate); @@ -133,7 +133,7 @@ public class extends VectorExpression { } } else { for(int i = 0; i != n; i++) { - scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchDate1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); scratchIntervalYearMonth2.set((int) vector2[i]); dtm.( scratchDate1, scratchIntervalYearMonth2, outputDate); @@ -144,7 +144,7 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchDate1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); scratchIntervalYearMonth2.set((int) vector2[i]); dtm.( scratchDate1, scratchIntervalYearMonth2, outputDate); @@ -152,7 +152,7 @@ public class extends VectorExpression { } } else { for(int i = 0; i != n; i++) { - scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchDate1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); scratchIntervalYearMonth2.set((int) vector2[i]); dtm.( scratchDate1, scratchIntervalYearMonth2, outputDate); diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt index 172bd39f10..167863b831 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import java.util.Arrays; -import java.sql.Date; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -42,8 +42,8 @@ public class extends VectorExpression { private final int colNum; private final HiveIntervalYearMonth value; - private transient final Date scratchDate1 = new Date(0); - private transient final Date outputDate = new Date(0); + private transient final Date scratchDate1 = new Date(); + private transient final Date outputDate = new Date(); private transient final DateTimeMath dtm = new DateTimeMath(); public (int colNum, long value, int outputColumnNum) { @@ -92,7 +92,7 @@ public class extends VectorExpression { if (inputColVector1.isRepeating) { if (inputColVector1.noNulls || 
!inputIsNull[0]) { outputIsNull[0] = false; - scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0])); + scratchDate1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); dtm.( scratchDate1, value, outputDate); outputVector[0] = DateWritable.dateToDays(outputDate); @@ -114,7 +114,7 @@ public class extends VectorExpression { for(int j = 0; j != n; j++) { final int i = sel[j]; outputIsNull[i] = false; - scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchDate1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchDate1, value, outputDate); outputVector[i] = DateWritable.dateToDays(outputDate); @@ -122,7 +122,7 @@ public class extends VectorExpression { } else { for(int j = 0; j != n; j++) { final int i = sel[j]; - scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchDate1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchDate1, value, outputDate); outputVector[i] = DateWritable.dateToDays(outputDate); @@ -137,7 +137,7 @@ public class extends VectorExpression { outputColVector.noNulls = true; } for(int i = 0; i != n; i++) { - scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchDate1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchDate1, value, outputDate); outputVector[i] = DateWritable.dateToDays(outputDate); @@ -154,7 +154,7 @@ public class extends VectorExpression { int i = sel[j]; if (!inputIsNull[i]) { outputIsNull[i] = false; - scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchDate1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchDate1, value, outputDate); outputVector[i] = DateWritable.dateToDays(outputDate); @@ -167,7 +167,7 @@ public class extends VectorExpression { for(int i = 0; i != n; i++) { if (!inputIsNull[i]) { outputIsNull[i] = false; - scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchDate1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchDate1, value, outputDate); outputVector[i] = DateWritable.dateToDays(outputDate); diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt index a515319021..55d285dab8 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import java.util.Arrays; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; @@ -41,7 +41,7 @@ public class extends VectorExpression { private final int colNum1; private final int colNum2; - private transient final Timestamp scratchTimestamp1 = new Timestamp(0); + private transient final Timestamp scratchTimestamp1 = new Timestamp(); private transient final DateTimeMath dtm = new DateTimeMath(); public (int colNum1, int colNum2, int outputColumnNum) { @@ -96,12 +96,12 @@ public class extends VectorExpression { * conditional checks in the inner loop. 
*/ if (inputColVector1.isRepeating && inputColVector2.isRepeating) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); dtm.( scratchTimestamp1, inputColVector2.asScratch(0), outputColVector.getScratch()); outputColVector.setFromScratch(0); } else if (inputColVector1.isRepeating) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; @@ -121,14 +121,14 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, value2, outputColVector.getScratch()); outputColVector.setFromScratch(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, value2, outputColVector.getScratch()); outputColVector.setFromScratch(i); @@ -138,14 +138,14 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, inputColVector2.asScratch(i), outputColVector.getScratch()); outputColVector.setFromScratch(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, inputColVector2.asScratch(i), outputColVector.getScratch()); outputColVector.setFromScratch(i); diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt index 0bae7dafaf..415fdf6bdc 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import java.util.Arrays; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -42,7 +42,7 @@ public class extends VectorExpression { private final int colNum; private final value; - private transient final Timestamp scratchTimestamp1 = new Timestamp(0); + private transient final Timestamp scratchTimestamp1 = new Timestamp(); private transient final DateTimeMath dtm = new DateTimeMath(); public (int colNum, value, int outputColumnNum) { @@ -90,7 +90,7 @@ public class extends VectorExpression { if (inputColVector1.isRepeating) { if (inputColVector1.noNulls || !inputIsNull[0]) { outputIsNull[0] = false; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); dtm.( scratchTimestamp1, value, 
outputColVector.getScratch()); outputColVector.setFromScratch(0); @@ -111,7 +111,7 @@ public class extends VectorExpression { for(int j = 0; j != n; j++) { final int i = sel[j]; outputIsNull[i] = false; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, value, outputColVector.getScratch()); outputColVector.setFromScratch(i); @@ -119,7 +119,7 @@ public class extends VectorExpression { } else { for(int j = 0; j != n; j++) { final int i = sel[j]; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, value, outputColVector.getScratch()); outputColVector.setFromScratch(i); @@ -134,7 +134,7 @@ public class extends VectorExpression { outputColVector.noNulls = true; } for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, value, outputColVector.getScratch()); outputColVector.setFromScratch(i); @@ -151,7 +151,7 @@ public class extends VectorExpression { int i = sel[j]; if (!inputIsNull[i]) { outputIsNull[i] = false; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, value, outputColVector.getScratch()); outputColVector.setFromScratch(i); @@ -164,7 +164,7 @@ public class extends VectorExpression { for(int i = 0; i != n; i++) { if (!inputIsNull[i]) { outputIsNull[i] = false; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.( scratchTimestamp1, value, outputColVector.getScratch()); outputColVector.setFromScratch(i); diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt index 42046e08b8..38b8688a9e 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt @@ -18,9 +18,9 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Date; import java.util.Arrays; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; @@ -52,12 +52,12 @@ public class extends VectorExpression { private final Date value; private transient final HiveIntervalYearMonth scratchIntervalYearMonth2 = new HiveIntervalYearMonth(); - private transient final Date outputDate = new Date(0); + private transient final Date outputDate = new Date(); private transient final DateTimeMath dtm = new DateTimeMath(); public (long value, int colNum, int outputColumnNum) { super(outputColumnNum); - this.value = new Date(DateWritable.daysToMillis((int) value)); + this.value = Date.ofEpochMilli(DateWritable.daysToMillis((int) value)); this.colNum = colNum; } diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt 
b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt index 191eaa6d17..705a2344c2 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt @@ -18,9 +18,9 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; import java.util.Arrays; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; @@ -55,8 +55,7 @@ public class extends VectorExpression { public (long value, int colNum, int outputColumnNum) { super(outputColumnNum); // Scalar input #1 is type date (days). For the math we convert it to a timestamp. - this.value = new Timestamp(0); - this.value.setTime(DateWritable.daysToMillis((int) value)); + this.value = Timestamp.ofEpochMilli(DateWritable.daysToMillis((int) value)); this.colNum = colNum; } diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt index b5a36df8e0..796aea6b8a 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt @@ -26,7 +26,7 @@ import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampColumn.txt index 4afed54dfb..c4e9dd25da 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampScalar.txt index 5ae21e69dc..3506b116e7 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampScalar.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampScalar.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import 
org.apache.hadoop.hive.ql.exec.vector.*; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleScalarCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleScalarCompareTimestampColumn.txt index 8f8104daed..c9fef9b217 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleScalarCompareTimestampColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleScalarCompareTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnBetween.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnBetween.txt index 604060a158..9214a9d83f 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnBetween.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnBetween.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampColumn.txt index 0a541f9b2e..80ae6e1ecd 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt index 68e0006018..3fccfe2de1 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareLongDoubleColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareLongDoubleColumn.txt index c1ddc08d01..6195cb6c4b 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareLongDoubleColumn.txt +++ 
b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareLongDoubleColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.*; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt index d5952de1b6..801c2810dc 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt index 7c5b61469c..1ef2da3c1c 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Date; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; @@ -40,8 +40,8 @@ public class extends VectorExpression { private final int colNum2; private transient final HiveIntervalYearMonth scratchIntervalYearMonth1 = new HiveIntervalYearMonth(); - private transient final Date scratchDate2 = new Date(0); - private transient final Date outputDate = new Date(0); + private transient final Date scratchDate2 = new Date(); + private transient final Date outputDate = new Date(); private transient final DateTimeMath dtm = new DateTimeMath(); public (int colNum1, int colNum2, int outputColumnNum) { @@ -99,7 +99,7 @@ public class extends VectorExpression { */ if (inputColVector1.isRepeating && inputColVector2.isRepeating) { scratchIntervalYearMonth1.set((int) vector1[0]); - scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchDate2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); dtm.( scratchIntervalYearMonth1, scratchDate2, outputDate); outputVector[0] = DateWritable.dateToDays(outputDate); @@ -108,21 +108,21 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchDate2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( scratchIntervalYearMonth1, scratchDate2, outputDate); outputVector[i] = DateWritable.dateToDays(outputDate); } } else { for(int i = 0; i != n; i++) { - scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i])); + 
scratchDate2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( scratchIntervalYearMonth1, scratchDate2, outputDate); outputVector[i] = DateWritable.dateToDays(outputDate); } } } else if (inputColVector2.isRepeating) { - scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchDate2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; @@ -144,7 +144,7 @@ public class extends VectorExpression { for(int j = 0; j != n; j++) { int i = sel[j]; scratchIntervalYearMonth1.set((int) vector1[i]); - scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchDate2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( scratchIntervalYearMonth1, scratchDate2, outputDate); outputVector[i] = DateWritable.dateToDays(outputDate); @@ -152,7 +152,7 @@ public class extends VectorExpression { } else { for(int i = 0; i != n; i++) { scratchIntervalYearMonth1.set((int) vector1[i]); - scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchDate2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( scratchIntervalYearMonth1, scratchDate2, outputDate); outputVector[i] = DateWritable.dateToDays(outputDate); diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt index 84d6c4aa6f..aa9ce4c934 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import java.util.Arrays; -import java.sql.Date; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -43,13 +43,13 @@ public class extends VectorExpression { private final Date value; private transient final HiveIntervalYearMonth scratchIntervalYearMonth1 = new HiveIntervalYearMonth(); - private transient final Date outputDate = new Date(0); + private transient final Date outputDate = new Date(); private transient final DateTimeMath dtm = new DateTimeMath(); public (int colNum, long value, int outputColumnNum) { super(outputColumnNum); this.colNum = colNum; - this.value = new Date(DateWritable.daysToMillis((int) value)); + this.value = Date.ofEpochMilli(DateWritable.daysToMillis((int) value)); } public () { diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampColumn.txt index cfe44c1195..9eb367cc6a 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; diff --git 
a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt index 22f7abf93f..cccba8cfff 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import java.util.Arrays; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt index ffc2cec9b0..7f29e04bd6 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import java.util.Arrays; -import java.sql.Date; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; @@ -51,8 +51,8 @@ public class extends VectorExpression { private final HiveIntervalYearMonth value; private final int colNum; - private transient final Date scratchDate2 = new Date(0); - private transient final Date outputDate = new Date(0); + private transient final Date scratchDate2 = new Date(); + private transient final Date outputDate = new Date(); private transient final DateTimeMath dtm = new DateTimeMath(); public (long value, int colNum, int outputColumnNum) { @@ -106,7 +106,7 @@ public class extends VectorExpression { if (inputColVector2.isRepeating) { if (inputColVector2.noNulls || !inputIsNull[0]) { outputIsNull[0] = false; - scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchDate2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); dtm.( value, scratchDate2, outputDate); outputVector[0] = DateWritable.dateToDays(outputDate); @@ -128,7 +128,7 @@ public class extends VectorExpression { for(int j = 0; j != n; j++) { final int i = sel[j]; outputIsNull[i] = false; - scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchDate2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value, scratchDate2, outputDate); outputVector[i] = DateWritable.dateToDays(outputDate); @@ -136,7 +136,7 @@ public class extends VectorExpression { } else { for(int j = 0; j != n; j++) { final int i = sel[j]; - scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchDate2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value, scratchDate2, outputDate); outputVector[i] = DateWritable.dateToDays(outputDate); @@ -151,7 +151,7 @@ public class extends VectorExpression { outputColVector.noNulls = true; } for(int i = 0; i != n; i++) { - scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchDate2.setTimeInMillis(DateWritable.daysToMillis((int) 
vector2[i])); dtm.( value, scratchDate2, outputDate); outputVector[i] = DateWritable.dateToDays(outputDate); @@ -168,7 +168,7 @@ public class extends VectorExpression { int i = sel[j]; if (!inputIsNull[i]) { outputIsNull[i] = false; - scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchDate2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value, scratchDate2, outputDate); outputVector[i] = DateWritable.dateToDays(outputDate); @@ -181,7 +181,7 @@ public class extends VectorExpression { for(int i = 0; i != n; i++) { if (!inputIsNull[i]) { outputIsNull[i] = false; - scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchDate2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value, scratchDate2, outputDate); outputVector[i] = DateWritable.dateToDays(outputDate); diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticTimestampColumn.txt index 157e95e52e..7382adf0f4 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticTimestampColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticTimestampColumn.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import java.util.Arrays; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampColumn.txt index e3f36b9c75..9258035148 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; import org.apache.hadoop.hive.ql.exec.vector.*; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt index 2957c73203..d6bd238d86 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import java.util.Arrays; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.*; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt index 27d8a3d2d5..80409bca09 100644 --- 
a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; @@ -40,7 +39,7 @@ public class extends VectorExpression { private final int colNum1; private final int colNum2; - private transient final Timestamp scratchTimestamp2 = new Timestamp(0); + private transient final Timestamp scratchTimestamp2 = new Timestamp(); private transient final DateTimeMath dtm = new DateTimeMath(); public (int colNum1, int colNum2, int outputColumnNum) { @@ -95,7 +94,7 @@ public class extends VectorExpression { * conditional checks in the inner loop. */ if (inputColVector1.isRepeating && inputColVector2.isRepeating) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); dtm.( inputColVector1.asScratch(0), scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(0); @@ -104,21 +103,21 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value1, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value1, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); } } } else if (inputColVector2.isRepeating) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; @@ -137,14 +136,14 @@ public class extends VectorExpression { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( inputColVector1.asScratch(i), scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( inputColVector1.asScratch(i), scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt index 799daf2a6b..60a6888463 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import 
java.util.Arrays; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; @@ -47,8 +47,7 @@ public class extends VectorExpression { public (int colNum, long value, int outputColumnNum) { super(outputColumnNum); this.colNum = colNum; - this.value = new Timestamp(0); - this.value.setTime(DateWritable.daysToMillis((int) value)); + this.value = Timestamp.ofEpochMilli(DateWritable.daysToMillis((int) value)); } public () { diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthColumn.txt index f894bcfd8b..c4b747324a 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampColumn.txt index 4240994bf0..2b53d9ed4c 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt index bcb8fd1efe..6bd69f512e 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import java.util.Arrays; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampColumn.txt index b81b805594..02c23465cc 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampColumn.txt @@ -18,8 +18,7 @@ package 
org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt index cee680a525..ec1898e6e1 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import java.util.Arrays; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt index b50cbc86c4..af3f56bcd1 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import java.util.Arrays; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; @@ -50,7 +50,7 @@ public class extends VectorExpression { private final value; private final int colNum; - private transient final Timestamp scratchTimestamp2 = new Timestamp(0); + private transient final Timestamp scratchTimestamp2 = new Timestamp(); private transient final DateTimeMath dtm = new DateTimeMath(); public ( value, int colNum, int outputColumnNum) { @@ -103,7 +103,7 @@ public class extends VectorExpression { if (inputColVector2.isRepeating) { if (inputColVector2.noNulls || !inputIsNull[0]) { outputIsNull[0] = false; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); dtm.( value, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(0); @@ -125,7 +125,7 @@ public class extends VectorExpression { for(int j = 0; j != n; j++) { final int i = sel[j]; outputIsNull[i] = false; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i);; @@ -133,7 +133,7 @@ public class extends VectorExpression { } else { for(int j = 0; j != n; j++) { final int i = sel[j]; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); @@ -148,7 +148,7 @@ public class extends 
VectorExpression { outputColVector.noNulls = true; } for(int i = 0; i != n; i++) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); @@ -165,7 +165,7 @@ public class extends VectorExpression { int i = sel[j]; if (!inputIsNull[i]) { outputIsNull[i] = false; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); @@ -178,7 +178,7 @@ public class extends VectorExpression { for(int i = 0; i != n; i++) { if (!inputIsNull[i]) { outputIsNull[i] = false; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.( value, scratchTimestamp2, outputColVector.getScratch()); outputColVector.setFromScratch(i); diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt index 9db7b5323d..00811134b8 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import java.util.Arrays; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt index e860e4df81..0e80f9260a 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import java.util.Arrays; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareLongDoubleColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareLongDoubleColumn.txt index 970b7cca5c..75651dd3da 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareLongDoubleColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareLongDoubleColumn.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.*; import org.apache.hadoop.hive.ql.exec.vector.expressions.*; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; diff --git 
a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt index 10f6162a5d..b3d2a17bb5 100644 --- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt +++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; import java.util.Arrays; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt index 579437e59b..790a0a3b25 100644 --- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt +++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression; @@ -61,7 +60,7 @@ public class extends VectorAggregateExpression { transient private boolean isNull = true; public Aggregation() { - value = new Timestamp(0); + value = new Timestamp(); } public void checkValue(TimestampColumnVector colVector, int index) { @@ -81,7 +80,7 @@ public class extends VectorAggregateExpression { @Override public void reset () { isNull = true; - this.value.setTime(0); + this.value.setTimeInMillis(0); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java index a7465a7ab6..7b6c4f5e04 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java @@ -317,7 +317,7 @@ public String getName() { private Date readDateValue(String dateStr) { // try either yyyy-mm-dd, or integer representing days since epoch try { - DateWritable writableVal = new DateWritable(java.sql.Date.valueOf(dateStr)); + DateWritable writableVal = new DateWritable(org.apache.hadoop.hive.common.type.Date.valueOf(dateStr)); return new Date(writableVal.getDays()); } catch (IllegalArgumentException err) { // Fallback to integer parsing diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java index d59bf1fb6e..ee9b92d5ab 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java @@ -289,15 +289,15 @@ system.registerGenericUDF(UNARY_PLUS_FUNC_NAME, GenericUDFOPPositive.class); system.registerGenericUDF(UNARY_MINUS_FUNC_NAME, GenericUDFOPNegative.class); - system.registerUDF("day", UDFDayOfMonth.class, false); - system.registerUDF("dayofmonth", UDFDayOfMonth.class, false); + system.registerGenericUDF("day", UDFDayOfMonth.class); + system.registerGenericUDF("dayofmonth", UDFDayOfMonth.class); system.registerUDF("dayofweek", 
UDFDayOfWeek.class, false); - system.registerUDF("month", UDFMonth.class, false); + system.registerGenericUDF("month", UDFMonth.class); system.registerGenericUDF("quarter", GenericUDFQuarter.class); - system.registerUDF("year", UDFYear.class, false); - system.registerUDF("hour", UDFHour.class, false); - system.registerUDF("minute", UDFMinute.class, false); - system.registerUDF("second", UDFSecond.class, false); + system.registerGenericUDF("year", UDFYear.class); + system.registerGenericUDF("hour", UDFHour.class); + system.registerGenericUDF("minute", UDFMinute.class); + system.registerGenericUDF("second", UDFSecond.class); system.registerUDF("from_unixtime", UDFFromUnixTime.class, false); system.registerGenericUDF("to_date", GenericUDFDate.class); system.registerUDF("weekofyear", UDFWeekOfYear.class, false); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java index e96619cf86..e411678482 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java @@ -18,11 +18,10 @@ package org.apache.hadoop.hive.ql.exec.vector; -import java.sql.Date; -import java.sql.Timestamp; import java.util.List; import java.util.Map; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion; @@ -38,6 +37,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.VectorPartitionConversion; import org.apache.hadoop.hive.serde2.io.ByteWritable; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java index 47eaf367ea..709f01398d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java @@ -18,9 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector; -import java.sql.Timestamp; - import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -77,7 +76,7 @@ public static void debugDisplayOneRow(VectorizedRowBatch batch, int index, Strin } else if (colVector instanceof DecimalColumnVector) { sb.append(((DecimalColumnVector) colVector).vector[index].toString()); } else if (colVector instanceof TimestampColumnVector) { - Timestamp timestamp = new Timestamp(0); + Timestamp timestamp = new Timestamp(); ((TimestampColumnVector) colVector).timestampUpdate(timestamp, index); sb.append(timestamp.toString()); } else if (colVector instanceof IntervalDayTimeColumnVector) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java index c555464280..5597289922 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.ql.exec.vector; -import java.sql.Timestamp; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -27,6 +26,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DateWritable; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java index 1f46f2cf19..7af1531034 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java @@ -21,11 +21,11 @@ import org.apache.hive.common.util.Murmur3; import java.sql.Date; -import java.sql.Timestamp; import java.util.Arrays; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.KeyWrapper; import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -78,7 +78,7 @@ private HiveDecimalWritable[] decimalValues; private Timestamp[] timestampValues; - private static Timestamp ZERO_TIMESTAMP = new Timestamp(0); + private static Timestamp ZERO_TIMESTAMP = new Timestamp(); private HiveIntervalDayTime[] intervalDayTimeValues; private static HiveIntervalDayTime ZERO_INTERVALDAYTIME= new HiveIntervalDayTime(0, 0); @@ -115,7 +115,7 @@ private VectorHashKeyWrapper(HashContext ctx, int longValuesCount, int doubleVal byteLengths = EMPTY_INT_ARRAY; } for(int i = 0; i < timestampValuesCount; ++i) { - timestampValues[i] = new Timestamp(0); + timestampValues[i] = new Timestamp(); } for(int i = 0; i < intervalDayTimeValuesCount; ++i) { intervalDayTimeValues[i] = new HiveIntervalDayTime(); @@ -356,7 +356,7 @@ public void assignNullDecimal(int keyIndex, int index) { public void assignTimestamp(int index, Timestamp value) { // Do not assign the input value object to the timestampValues array element. // Always copy value using set* methods. 
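Note on the assignTimestamp change just below: the key wrapper must copy the timestamp by value and never keep a reference to the caller's object. A minimal sketch of that copy, using only the Hive common Timestamp methods this patch relies on (setTimeInMillis, getMillis, setNanos, getNanos); the class and method names here are illustrative, not part of the patch:

    import org.apache.hadoop.hive.common.type.Timestamp;

    public class TimestampCopySketch {
      // Copy src into dst by value, mirroring VectorHashKeyWrapper.assignTimestamp.
      static void copy(Timestamp src, Timestamp dst) {
        dst.setTimeInMillis(src.getMillis()); // epoch milliseconds
        dst.setNanos(src.getNanos());         // fractional-second (nanosecond) field
      }
    }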
- timestampValues[index].setTime(value.getTime()); + timestampValues[index].setTimeInMillis(value.getMillis()); timestampValues[index].setNanos(value.getNanos()); } @@ -367,7 +367,7 @@ public void assignTimestamp(int index, TimestampColumnVector colVector, int elem public void assignNullTimestamp(int keyIndex, int index) { isNull[keyIndex] = true; // assign 0 to simplify hashcode - timestampValues[index].setTime(ZERO_TIMESTAMP.getTime()); + timestampValues[index].setTimeInMillis(ZERO_TIMESTAMP.getMillis()); timestampValues[index].setNanos(ZERO_TIMESTAMP.getNanos()); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java index abbbe9a42b..37923b8afa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hive.ql.exec.vector; import java.lang.reflect.Constructor; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; @@ -34,6 +32,7 @@ import java.util.regex.Pattern; import org.apache.commons.lang.ArrayUtils; +import org.apache.hadoop.hive.common.type.Date; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation; @@ -42,6 +41,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java index d92ec320b5..edfa1eaacf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java @@ -19,26 +19,20 @@ package org.apache.hadoop.hive.ql.exec.vector; import java.io.IOException; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; -import java.util.LinkedList; import java.util.List; -import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; -import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; -import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -50,18 +44,14 @@ import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import 
org.apache.hadoop.hive.serde2.io.TimestampWritable; -import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; -import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector; -import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; @@ -70,7 +60,6 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo; -import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; import org.apache.hadoop.io.BooleanWritable; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.DataOutputBuffer; @@ -79,7 +68,8 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; -import org.apache.hive.common.util.DateUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class VectorizedBatchUtil { private static final Logger LOG = LoggerFactory.getLogger(VectorizedBatchUtil.class); @@ -895,9 +885,9 @@ public static Writable getPrimitiveWritable(PrimitiveCategory primitiveCategory) case LONG: return new LongWritable(0); case TIMESTAMP: - return new TimestampWritable(new Timestamp(0)); + return new TimestampWritable(new Timestamp()); case DATE: - return new DateWritable(new Date(0)); + return new DateWritable(new Date()); case FLOAT: return new FloatWritable(0); case DOUBLE: @@ -972,7 +962,7 @@ public static StringBuilder debugFormatOneRow(VectorizedRowBatch batch, } else if (colVector instanceof DecimalColumnVector) { sb.append(((DecimalColumnVector) colVector).vector[index].toString()); } else if (colVector instanceof TimestampColumnVector) { - Timestamp timestamp = new Timestamp(0); + Timestamp timestamp = new Timestamp(); ((TimestampColumnVector) colVector).timestampUpdate(timestamp, index); sb.append(timestamp.toString()); } else if (colVector instanceof IntervalDayTimeColumnVector) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java index 6588385b9f..584e8e54fa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java @@ -18,45 +18,39 @@ package org.apache.hadoop.hive.ql.exec.vector; import java.io.IOException; -import java.sql.Date; -import java.sql.Timestamp; import java.util.Arrays; import java.util.LinkedHashMap; -import java.util.List; import java.util.Map; -import 
org.apache.hadoop.hive.common.type.HiveChar; -import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation; +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport.Support; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.IOPrepareCache; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.VirtualColumn; -import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.MapWork; import org.apache.hadoop.hive.ql.plan.PartitionDesc; -import org.apache.hadoop.hive.ql.plan.Explain.Level; -import org.apache.hadoop.hive.ql.plan.Explain.Vectorization; -import org.apache.hadoop.hive.serde2.ColumnProjectionUtils; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.mapred.FileSplit; -import org.apache.hive.common.util.DateUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java index f1a584e0a5..b12deaf6c8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java @@ -44,7 +44,7 @@ public CastDateToTimestamp() { } private void setDays(TimestampColumnVector timestampColVector, long[] vector, int elementNum) { - timestampColVector.getScratchTimestamp().setTime(DateWritable.daysToMillis((int) vector[elementNum])); + timestampColVector.getScratchTimestamp().setTimeInMillis(DateWritable.daysToMillis((int) vector[elementNum])); timestampColVector.setFromScratchTimestamp(elementNum); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java index 8107c44622..f47c79a192 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java @@ -18,9 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - import org.apache.hadoop.hive.common.type.HiveDecimal; 
+import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.metadata.HiveException; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java index a3c421241a..cdffa1b62d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java @@ -42,7 +42,7 @@ public CastLongToTimestamp() { } private void setSeconds(TimestampColumnVector timestampColVector, long[] vector, int elementNum) { - timestampColVector.getScratchTimestamp().setTime(vector[elementNum] * 1000); + timestampColVector.getScratchTimestamp().setTimeInSeconds(vector[elementNum]); timestampColVector.setFromScratchTimestamp(elementNum); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java index 6a29c62d67..8d8bcfe74b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java @@ -40,7 +40,7 @@ public CastMillisecondsLongToTimestamp() { private void setMilliseconds(TimestampColumnVector timestampColVector, long[] vector, int elementNum) { - timestampColVector.getScratchTimestamp().setTime(vector[elementNum]); + timestampColVector.getScratchTimestamp().setTimeInMillis(vector[elementNum]); timestampColVector.setFromScratchTimestamp(elementNum); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java index b55712a89e..0f52be548f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java @@ -18,12 +18,11 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hive.common.util.DateParser; @@ -38,7 +37,7 @@ private final int inputColumn; - private transient final java.sql.Date sqlDate = new java.sql.Date(0); + private transient final Date sqlDate = new Date(); private transient final DateParser dateParser = new DateParser(); public CastStringToDate() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java index 4cc1be5e62..e59579316b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java @@ -19,13 +19,14 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; import java.nio.charset.StandardCharsets; -import java.sql.Timestamp; import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveChar; +import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.*; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java index 7342d9ee70..b9e02eb884 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java @@ -18,14 +18,12 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - -import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; -import org.apache.hadoop.hive.ql.exec.vector.*; -import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.util.DateTimeMath; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -38,8 +36,8 @@ private final int colNum1; private final int colNum2; - private transient final Timestamp scratchTimestamp1 = new Timestamp(0); - private transient final Timestamp scratchTimestamp2 = new Timestamp(0); + private transient final Timestamp scratchTimestamp1 = new Timestamp(); + private transient final Timestamp scratchTimestamp2 = new Timestamp(); private transient final DateTimeMath dtm = new DateTimeMath(); public DateColSubtractDateColumn(int colNum1, int colNum2, int outputColumnNum) { @@ -96,38 +94,38 @@ public void evaluate(VectorizedRowBatch batch) { * conditional checks in the inner loop. 
*/ if (inputColVector1.isRepeating && inputColVector2.isRepeating) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0])); - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(0); } else if (inputColVector1.isRepeating) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } } } else if (inputColVector2.isRepeating) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } @@ -136,15 +134,15 @@ public void evaluate(VectorizedRowBatch batch) { if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } } else { for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java index 3ea189aa42..f3925a7623 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java @@ -18,19 +18,16 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; -import java.util.Arrays; - -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; -import org.apache.hadoop.hive.ql.exec.vector.*; -import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.util.DateTimeMath; import org.apache.hadoop.hive.serde2.io.DateWritable; +import java.util.Arrays; + // A type date (LongColumnVector storing epoch days) minus a type date produces a // type interval_day_time (TimestampColumnVector storing nanosecond interval in 2 longs). public class DateColSubtractDateScalar extends VectorExpression { @@ -40,14 +37,14 @@ private final int colNum; private final Timestamp value; - private transient final Timestamp scratchTimestamp1 = new Timestamp(0); + private transient final Timestamp scratchTimestamp1 = new Timestamp(); private transient final DateTimeMath dtm = new DateTimeMath(); public DateColSubtractDateScalar(int colNum, long value, int outputColumnNum) { super(outputColumnNum); this.colNum = colNum; - this.value = new Timestamp(0); - this.value.setTime(DateWritable.daysToMillis((int) value)); + this.value = new Timestamp(); + this.value.setTimeInMillis(DateWritable.daysToMillis((int) value)); } public DateColSubtractDateScalar() { @@ -88,7 +85,7 @@ public void evaluate(VectorizedRowBatch batch) { if (inputColVector1.isRepeating) { if (inputColVector1.noNulls || !inputIsNull[0]) { outputIsNull[0] = false; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[0])); dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(0); @@ -111,14 +108,14 @@ public void evaluate(VectorizedRowBatch batch) { final int i = sel[j]; // Set isNull before call in case it changes it mind. 
outputIsNull[i] = false; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } } else { for(int j = 0; j != n; j++) { final int i = sel[j]; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } @@ -132,7 +129,7 @@ public void evaluate(VectorizedRowBatch batch) { outputColVector.noNulls = true; } for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } @@ -146,14 +143,14 @@ public void evaluate(VectorizedRowBatch batch) { for(int j = 0; j != n; j++) { int i = sel[j]; outputIsNull[i] = inputIsNull[i]; - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } } else { System.arraycopy(inputIsNull, 0, outputIsNull, 0, n); for(int i = 0; i != n; i++) { - scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i])); + scratchTimestamp1.setTimeInMillis(DateWritable.daysToMillis((int) vector1[i])); dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java index a87ae393de..2ca03074c2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java @@ -18,16 +18,16 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; -import java.util.Arrays; - -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.*; -import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; -import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.util.DateTimeMath; import org.apache.hadoop.hive.serde2.io.DateWritable; +import java.util.Arrays; + // A type date (LongColumnVector storing epoch days) minus a type date produces a // type interval_day_time (TimestampColumnVector storing nanosecond interval in 2 longs). 
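The date-minus-date expressions in this file and the one that follows share one conversion step: the epoch-day long stored in the date LongColumnVector is widened to milliseconds with DateWritable.daysToMillis and loaded into a reusable scratch Timestamp, which DateTimeMath.subtract then turns into an interval_day_time. A minimal sketch of that step, using only calls that appear in this patch (the helper name is illustrative):

    import org.apache.hadoop.hive.common.type.Timestamp;
    import org.apache.hadoop.hive.serde2.io.DateWritable;

    public class EpochDaysSketch {
      // Load an epoch-day value into a reusable scratch Timestamp, as the
      // subtract expressions do before calling DateTimeMath.subtract.
      static void toScratchTimestamp(long epochDays, Timestamp scratch) {
        scratch.setTimeInMillis(DateWritable.daysToMillis((int) epochDays));
      }
    }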
public class DateScalarSubtractDateColumn extends VectorExpression { @@ -37,14 +37,14 @@ private final Timestamp value; private final int colNum; - private transient final Timestamp scratchTimestamp2 = new Timestamp(0); + private transient final Timestamp scratchTimestamp2 = new Timestamp(); private transient final DateTimeMath dtm = new DateTimeMath(); public DateScalarSubtractDateColumn(long value, int colNum, int outputColumnNum) { super(outputColumnNum); this.colNum = colNum; - this.value = new Timestamp(0); - this.value.setTime(DateWritable.daysToMillis((int) value)); + this.value = new Timestamp(); + this.value.setTimeInMillis(DateWritable.daysToMillis((int) value)); } public DateScalarSubtractDateColumn() { @@ -91,7 +91,7 @@ public void evaluate(VectorizedRowBatch batch) { if (inputColVector2.isRepeating) { if (inputColVector2.noNulls || !inputIsNull[0]) { outputIsNull[0] = false; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[0])); dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(0); } else { @@ -108,14 +108,14 @@ public void evaluate(VectorizedRowBatch batch) { for(int j = 0; j != n; j++) { int i = sel[j]; outputIsNull[i] = false; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } } else { Arrays.fill(outputIsNull, 0, n, false); for(int i = 0; i != n; i++) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } @@ -129,14 +129,14 @@ public void evaluate(VectorizedRowBatch batch) { for(int j = 0; j != n; j++) { int i = sel[j]; outputIsNull[i] = inputIsNull[i]; - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } } else { System.arraycopy(inputIsNull, 0, outputIsNull, 0, n); for(int i = 0; i != n; i++) { - scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i])); + scratchTimestamp2.setTimeInMillis(DateWritable.daysToMillis((int) vector2[i])); dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime()); outputColVector.setFromScratchIntervalDayTime(i); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java index 252a8163d9..7b06c300e0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java @@ -18,11 +18,10 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveDecimal; import 
org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.*; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.DynamicValue; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java index ca92c7c1b7..f7b7ce5698 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java @@ -18,10 +18,10 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; import java.util.Arrays; import java.util.HashSet; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ITimestampInExpr.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ITimestampInExpr.java index d671108432..804d5fa49d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ITimestampInExpr.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ITimestampInExpr.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; +import org.apache.hadoop.hive.common.type.Timestamp; public interface ITimestampInExpr { void setInListValues(Timestamp[] inVals); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java index fcd7ca43e3..eda93bc49b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java index c0cb2c1b96..9ccc0662b8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java @@ -18,10 +18,9 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; import java.util.Arrays; -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java index 510774a263..f006420cf5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java index 0798f1fe62..f0499eba24 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java @@ -18,14 +18,13 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; import java.util.Arrays; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; /** * Compute IF(expr1, expr2, expr3) for 3 input column expressions. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java index 0c4cce0dbf..8a8ceb5ee9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java @@ -18,10 +18,9 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; -import java.sql.Timestamp; - /** * Compute IF(expr1, expr2, expr3) for 3 input expressions. * The first is always a boolean (LongColumnVector). 
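The IfExprTimestamp* changes above are import-only: the conditional logic is untouched, with the first operand still arriving as a LongColumnVector of 0/1 flags and the timestamp operands now being Hive's own Timestamp type. A hedged sketch of the scalar/scalar decision per row (the class and method names here are illustrative, not from the patch):

```java
import org.apache.hadoop.hive.common.type.Timestamp;

final class IfExprTimestampSketch {
  // Mirrors the per-row choice in IfExprTimestampScalarScalarBase:
  // any non-zero flag from the boolean LongColumnVector selects the "then" scalar.
  static Timestamp choose(long boolFlag, Timestamp thenValue, Timestamp elseValue) {
    return boolFlag != 0 ? thenValue : elseValue;
  }
}
```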
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java index 0059c58690..e6d820338b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java @@ -19,11 +19,11 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import java.sql.Timestamp; import java.util.Arrays; /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampColumnInList.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampColumnInList.java index 31a0ad1661..a72f450469 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampColumnInList.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampColumnInList.java @@ -18,12 +18,12 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; import java.util.Arrays; import java.util.HashSet; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java index 48638b7252..28448207bd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java @@ -18,10 +18,9 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; - import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java index 55dc461985..0ac021bfa5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java @@ -18,8 +18,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -27,15 +25,33 @@ import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hive.common.type.Date; import 
org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; -import org.apache.hadoop.hive.ql.exec.vector.*; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.MapColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils; +import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; -import org.apache.hadoop.hive.serde2.io.*; +import org.apache.hadoop.hive.serde2.io.DateWritable; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; @@ -58,9 +74,9 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveVarcharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIntObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableLongObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableShortObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableStringObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; @@ -69,7 +85,6 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo; import org.apache.hadoop.io.Text; -import org.apache.hive.common.util.DateUtils; /** * VectorExpressionWritableFactory helper class for generating VectorExpressionWritable objects. 
@@ -816,14 +831,14 @@ private static VectorExpressionWriter genVectorExpressionWritableDate( public VectorExpressionWriter init(SettableDateObjectInspector objInspector) throws HiveException { super.init(objInspector); - dt = new Date(0); + dt = new Date(); obj = initValue(null); return this; } @Override public Object writeValue(long value) { - dt.setTime(DateWritable.daysToMillis((int) value)); + dt.setTimeInMillis(DateWritable.daysToMillis((int) value)); ((SettableDateObjectInspector) this.objectInspector).set(obj, dt); return obj; } @@ -833,14 +848,14 @@ public Object setValue(Object field, long value) { if (null == field) { field = initValue(null); } - dt.setTime(DateWritable.daysToMillis((int) value)); + dt.setTimeInMillis(DateWritable.daysToMillis((int) value)); ((SettableDateObjectInspector) this.objectInspector).set(field, dt); return field; } @Override public Object initValue(Object ignored) { - return ((SettableDateObjectInspector) this.objectInspector).create(new Date(0)); + return ((SettableDateObjectInspector) this.objectInspector).create(new Date()); } }.init(fieldObjInspector); @@ -896,7 +911,7 @@ public Object setValue(Object field, Timestamp value) { @Override public Object initValue(Object ignored) { - return ((SettableTimestampObjectInspector) this.objectInspector).create(new Timestamp(0)); + return ((SettableTimestampObjectInspector) this.objectInspector).create(new Timestamp()); } }.init(fieldObjInspector); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java index f6e9c8b6df..e3aa9c69c7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -31,9 +32,6 @@ import org.apache.hadoop.io.Text; import org.apache.hive.common.util.DateParser; -import java.util.Arrays; -import java.sql.Date; - public class VectorUDFDateAddColCol extends VectorExpression { private static final long serialVersionUID = 1L; @@ -43,7 +41,7 @@ protected boolean isPositive = true; private transient final Text text = new Text(); - private transient final Date date = new Date(0); + private transient final Date date = new Date(); private transient final DateParser dateParser = new DateParser(); // Transient members initialized by transientInit method. 
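Both the date writer above and the VectorUDFDateAdd* hunks that follow rely on the same days-to-millis round trip, now routed through Hive's Date type instead of java.sql.Date. A small sketch of that conversion, assuming only the Date and DateWritable methods already used in this patch (EpochDaySketch is an illustrative name):

```java
import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.serde2.io.DateWritable;

final class EpochDaySketch {
  // Reused scratch Date, as in the vectorized writers and date_add/date_sub.
  private final Date scratch = new Date();

  long roundTrip(int epochDays) {
    // LongColumnVector stores DATE values as days since 1970-01-01; the writer
    // expands them to millis before handing the Date to the
    // SettableDateObjectInspector.
    scratch.setTimeInMillis(DateWritable.daysToMillis(epochDays));
    // date_add/date_sub then collapse the (possibly adjusted) millis back to days.
    return DateWritable.millisToDays(scratch.getMillis());
  }
}
```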
@@ -195,7 +193,7 @@ protected void evaluateString(BytesColumnVector inputColumnVector1, LongColumnVe outputVector.isNull[index] = true; return; } - long days = DateWritable.millisToDays(date.getTime()); + long days = DateWritable.millisToDays(date.getMillis()); if (isPositive) { days += numDays; } else { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java index 7bb5c54f23..ecf6d42339 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -31,7 +32,6 @@ import org.apache.hadoop.io.Text; import org.apache.hive.common.util.DateParser; -import java.sql.Date; import java.util.Arrays; public class VectorUDFDateAddColScalar extends VectorExpression { @@ -44,7 +44,7 @@ private transient final Text text = new Text(); private transient final DateParser dateParser = new DateParser(); - private transient final Date date = new Date(0); + private transient final Date date = new Date(); // Transient members initialized by transientInit method. private transient PrimitiveCategory primitiveCategory; @@ -334,7 +334,7 @@ protected void evaluateString(ColumnVector columnVector, LongColumnVector output outputVector.isNull[i] = true; return; } - long days = DateWritable.millisToDays(date.getTime()); + long days = DateWritable.millisToDays(date.getMillis()); if (isPositive) { days += numDays; } else { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java index ecde39b492..704e5c7722 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java @@ -18,6 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; @@ -28,8 +30,6 @@ import org.apache.hive.common.util.DateParser; import java.nio.charset.StandardCharsets; -import java.sql.Date; -import java.sql.Timestamp; import java.util.Arrays; @@ -45,7 +45,7 @@ protected boolean isPositive = true; private transient final DateParser dateParser = new DateParser(); - private transient final Date baseDate = new Date(0); + private transient final Date baseDate = new Date(); // Transient members initialized by transientInit method. 
private transient PrimitiveCategory primitiveCategory; @@ -97,11 +97,11 @@ public void evaluate(VectorizedRowBatch batch) { switch (primitiveCategory) { case DATE: - baseDate.setTime(DateWritable.daysToMillis((int) longValue)); + baseDate.setTimeInMillis(DateWritable.daysToMillis((int) longValue)); break; case TIMESTAMP: - baseDate.setTime(timestampValue.getTime()); + baseDate.setTimeInMillis(timestampValue.getMillis()); break; case STRING: @@ -135,7 +135,7 @@ public void evaluate(VectorizedRowBatch batch) { // We do not need to do a column reset since we are carefully changing the output. outputColVector.isRepeating = false; - long baseDateDays = DateWritable.millisToDays(baseDate.getTime()); + long baseDateDays = DateWritable.millisToDays(baseDate.getMillis()); if (inputCol.isRepeating) { if (inputCol.noNulls || !inputCol.isNull[0]) { outputColVector.isNull[0] = false; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java index 0d794fe731..faad402aab 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; @@ -31,18 +31,13 @@ import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; -import java.sql.Date; -import java.text.ParseException; -import java.text.SimpleDateFormat; - public class VectorUDFDateDiffColCol extends VectorExpression { private static final long serialVersionUID = 1L; private final int colNum1; private final int colNum2; - private transient final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); - private transient final Date date = new Date(0); + private transient final Date date = new Date(); // Transient members initialized by transientInit method. 
private transient LongColumnVector dateVector1; @@ -220,9 +215,9 @@ public void copySelected( if (!input.isNull[0]) { String string = new String(input.vector[0], input.start[0], input.length[0]); try { - date.setTime(formatter.parse(string).getTime()); + date.setTimeInMillis(Date.valueOf(string).getMillis()); output.vector[0] = DateWritable.dateToDays(date); - } catch (ParseException e) { + } catch (IllegalArgumentException e) { output.isNull[0] = true; } } @@ -274,9 +269,9 @@ public void copySelected( private void setDays(BytesColumnVector input, LongColumnVector output, int i) { String string = new String(input.vector[i], input.start[i], input.length[i]); try { - date.setTime(formatter.parse(string).getTime()); + date.setTimeInMillis(Date.valueOf(string).getMillis()); output.vector[i] = DateWritable.dateToDays(date); - } catch (ParseException e) { + } catch (IllegalArgumentException e) { output.isNull[i] = true; output.noNulls = false; } @@ -297,7 +292,7 @@ public void copySelected( output.isRepeating = true; if (!input.isNull[0]) { - date.setTime(input.getTime(0)); + date.setTimeInMillis(input.getTime(0)); output.vector[0] = DateWritable.dateToDays(date); } return; @@ -310,12 +305,12 @@ public void copySelected( if (selectedInUse) { for (int j = 0; j < size; j++) { int i = sel[j]; - date.setTime(input.getTime(i)); + date.setTimeInMillis(input.getTime(i)); output.vector[i] = DateWritable.dateToDays(date); } } else { for (int i = 0; i < size; i++) { - date.setTime(input.getTime(i)); + date.setTimeInMillis(input.getTime(i)); output.vector[i] = DateWritable.dateToDays(date); } } @@ -334,14 +329,14 @@ public void copySelected( for (int j = 0; j < size; j++) { int i = sel[j]; if (!input.isNull[i]) { - date.setTime(input.getTime(i)); + date.setTimeInMillis(input.getTime(i)); output.vector[i] = DateWritable.dateToDays(date); } } } else { for (int i = 0; i < size; i++) { if (!input.isNull[i]) { - date.setTime(input.getTime(i)); + date.setTimeInMillis(input.getTime(i)); output.vector[i] = DateWritable.dateToDays(date); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java index 08c91e23e3..1cbeb7a041 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java @@ -18,7 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.hadoop.hive.metastore.parser.ExpressionTree.Operator; +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -30,11 +31,6 @@ import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.io.Text; -import java.nio.charset.StandardCharsets; -import java.sql.Date; -import java.sql.Timestamp; -import java.text.ParseException; -import java.text.SimpleDateFormat; import java.util.Arrays; public class VectorUDFDateDiffColScalar extends VectorExpression { @@ -46,9 +42,8 @@ private Timestamp timestampValue; private byte[] bytesValue; - private transient final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); private transient final Text text = new Text(); - private transient final Date 
date = new Date(0); + private transient final Date date = new Date(); private int baseDate; @@ -104,7 +99,7 @@ public void evaluate(VectorizedRowBatch batch) { break; case TIMESTAMP: - date.setTime(timestampValue.getTime()); + date.setTimeInMillis(timestampValue.getMillis()); baseDate = DateWritable.dateToDays(date); break; @@ -112,7 +107,7 @@ public void evaluate(VectorizedRowBatch batch) { case CHAR: case VARCHAR: try { - date.setTime(formatter.parse(new String(bytesValue, "UTF-8")).getTime()); + date.setTimeInDays(Date.valueOf(new String(bytesValue, "UTF-8")).getDays()); baseDate = DateWritable.dateToDays(date); break; } catch (Exception e) { @@ -344,7 +339,7 @@ public void evaluate(VectorizedRowBatch batch) { protected int evaluateTimestamp(ColumnVector columnVector, int index) { TimestampColumnVector tcv = (TimestampColumnVector) columnVector; - date.setTime(tcv.getTime(index)); + date.setTimeInMillis(tcv.getTime(index)); return DateWritable.dateToDays(date) - baseDate; } @@ -357,9 +352,9 @@ protected void evaluateString(ColumnVector columnVector, LongColumnVector output BytesColumnVector bcv = (BytesColumnVector) columnVector; text.set(bcv.vector[i], bcv.start[i], bcv.length[i]); try { - date.setTime(formatter.parse(text.toString()).getTime()); + date.setTimeInDays(Date.valueOf(text.toString()).getDays()); output.vector[i] = DateWritable.dateToDays(date) - baseDate; - } catch (ParseException e) { + } catch (IllegalArgumentException e) { output.vector[i] = 1; output.isNull[i] = true; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java index c436c9606e..79827fd9bb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java @@ -18,6 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -29,8 +31,6 @@ import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.io.Text; -import java.sql.Date; -import java.sql.Timestamp; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Arrays; @@ -46,7 +46,7 @@ private transient final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); private transient final Text text = new Text(); - private transient final Date date = new Date(0); + private transient final Date date = new Date(); private int baseDate; @@ -103,7 +103,7 @@ public void evaluate(VectorizedRowBatch batch) { break; case TIMESTAMP: - date.setTime(timestampValue.getTime()); + date.setTimeInMillis(timestampValue.getMillis()); baseDate = DateWritable.dateToDays(date); break; @@ -111,7 +111,7 @@ public void evaluate(VectorizedRowBatch batch) { case CHAR: case VARCHAR: try { - date.setTime(formatter.parse(new String(stringValue, "UTF-8")).getTime()); + date.setTimeInMillis(formatter.parse(new String(stringValue, "UTF-8")).getTime()); baseDate = DateWritable.dateToDays(date); break; } catch (Exception e) { @@ -344,7 +344,7 @@ public void evaluate(VectorizedRowBatch batch) { protected int evaluateTimestamp(ColumnVector 
columnVector, int index) { TimestampColumnVector tcv = (TimestampColumnVector) columnVector; - date.setTime(tcv.getTime(index)); + date.setTimeInMillis(tcv.getTime(index)); return baseDate - DateWritable.dateToDays(date); } @@ -357,7 +357,7 @@ protected void evaluateString(ColumnVector columnVector, LongColumnVector output BytesColumnVector bcv = (BytesColumnVector) columnVector; text.set(bcv.vector[i], bcv.start[i], bcv.length[i]); try { - date.setTime(formatter.parse(text.toString()).getTime()); + date.setTimeInMillis(formatter.parse(text.toString()).getTime()); output.vector[i] = baseDate - DateWritable.dateToDays(date); } catch (ParseException e) { output.vector[i] = 1; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java index 740a00c219..70b3579e27 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java @@ -69,7 +69,7 @@ public void transientInit() throws HiveException { } protected long getTimestampField(TimestampColumnVector timestampColVector, int elementNum) { - calendar.setTime(timestampColVector.asScratchTimestamp(elementNum)); + calendar.setTimeInMillis(timestampColVector.asScratchTimestamp(elementNum).getMillis()); return calendar.get(field); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampTimestamp.java index a69c9f7231..cefd7d5a0b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampTimestamp.java @@ -30,7 +30,7 @@ @Override protected long getTimestampField(TimestampColumnVector timestampColVector, int elementNum) { - return timestampColVector.asScratchTimestamp(elementNum).getTime() / 1000; + return timestampColVector.asScratchTimestamp(elementNum).getSeconds(); } public VectorUDFUnixTimeStampTimestamp(int colNum, int outputColumnNum) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFOperator.java index 39fab2cba2..dbbfc85443 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFOperator.java @@ -18,23 +18,19 @@ package org.apache.hadoop.hive.ql.exec.vector.ptf; -import java.io.IOException; -import java.sql.Timestamp; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; -import java.util.Properties; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.commons.lang.ArrayUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type; import 
org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector; @@ -45,26 +41,20 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizationOperator; import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type; -import org.apache.hadoop.hive.ql.exec.vector.expressions.IdentityExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.BaseWork; -import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.PTFDesc; import org.apache.hadoop.hive.ql.plan.VectorDesc; import org.apache.hadoop.hive.ql.plan.VectorPTFDesc; -import org.apache.hadoop.hive.ql.plan.VectorPTFDesc.SupportedFunctionType; +import org.apache.hadoop.hive.ql.plan.VectorPTFInfo; import org.apache.hadoop.hive.ql.plan.api.OperatorType; import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef; -import org.apache.hadoop.hive.ql.plan.VectorPTFInfo; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; -import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; @@ -528,7 +518,7 @@ private void setCurrentPartition(VectorizedRowBatch batch) { break; case TIMESTAMP: if (currentPartitionTimestamps[i] == null) { - currentPartitionTimestamps[i] = new Timestamp(0); + currentPartitionTimestamps[i] = new Timestamp(); } ((TimestampColumnVector) colVector).timestampUpdate(currentPartitionTimestamps[i], 0); break; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java index 71682af364..1f7aec6b80 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java @@ -19,12 +19,10 @@ package org.apache.hadoop.hive.ql.io.orc; import java.io.IOException; -import java.sql.Timestamp; import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; @@ -41,6 +39,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector; @@ -61,9 +60,6 @@ import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; -import 
com.google.common.annotations.VisibleForTesting; -import org.apache.orc.PhysicalWriter; - /** * An ORC file writer. The file is divided into stripes, which is the natural * unit of work when reading. Each stripe is buffered in memory until the @@ -195,9 +191,8 @@ static void setColumn(int rowId, ColumnVector column, } case TIMESTAMP: { TimestampColumnVector vector = (TimestampColumnVector) column; - Timestamp ts = ((TimestampObjectInspector) inspector) - .getPrimitiveJavaObject(obj); - vector.set(rowId, ts); + vector.set(rowId, ((TimestampObjectInspector) inspector) + .getPrimitiveJavaObject(obj)); break; } case DATE: { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java index 21762cd78b..70bc1fcd81 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java @@ -14,10 +14,10 @@ package org.apache.hadoop.hive.ql.io.parquet.convert; import java.math.BigDecimal; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Map; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime; import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java index 677fb53028..f08b23403a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java @@ -13,12 +13,13 @@ */ package org.apache.hadoop.hive.ql.io.parquet.timestamp; -import java.sql.Timestamp; import java.util.Calendar; import java.util.GregorianCalendar; import java.util.TimeZone; import java.util.concurrent.TimeUnit; +import org.apache.hadoop.hive.common.type.Timestamp; + import jodd.datetime.JDateTime; /** @@ -58,7 +59,7 @@ public static Calendar getCalendar(boolean skipConversion) { public static NanoTime getNanoTime(Timestamp ts, boolean skipConversion) { Calendar calendar = getCalendar(skipConversion); - calendar.setTime(ts); + calendar.setTimeInMillis(ts.getMillis()); int year = calendar.get(Calendar.YEAR); if (calendar.get(Calendar.ERA) == GregorianCalendar.BC) { year = 1 - year; @@ -106,8 +107,7 @@ public static Timestamp getTimestamp(NanoTime nt, boolean skipConversion) { calendar.set(Calendar.HOUR_OF_DAY, hour); calendar.set(Calendar.MINUTE, minutes); calendar.set(Calendar.SECOND, seconds); - Timestamp ts = new Timestamp(calendar.getTimeInMillis()); - ts.setNanos((int) nanos); + Timestamp ts = Timestamp.ofEpochMilli(calendar.getTimeInMillis(), (int) nanos); return ts; } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java index 5e6802282d..c7b60c739f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java @@ -18,10 +18,10 @@ package org.apache.hadoop.hive.ql.io.parquet.vector; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.parquet.column.Dictionary; import java.io.IOException; -import java.sql.Timestamp; /** * The interface 
to wrap the underlying Parquet dictionary and non dictionary encoded page reader. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java index 17d6e338e6..bacc365122 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.io.parquet.vector; import org.apache.hadoop.hive.common.type.HiveBaseChar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime; import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils; @@ -38,7 +39,6 @@ import java.io.UnsupportedEncodingException; import java.nio.ByteBuffer; import java.nio.ByteOrder; -import java.sql.Timestamp; import java.util.Arrays; /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java index 4e6993b61a..e305d85828 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java @@ -13,6 +13,7 @@ */ package org.apache.hadoop.hive.ql.io.parquet.vector; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java index cf1210befc..95e9afea38 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java @@ -13,9 +13,9 @@ */ package org.apache.hadoop.hive.ql.io.parquet.write; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe; import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -46,9 +46,9 @@ import org.apache.parquet.schema.GroupType; import org.apache.parquet.schema.OriginalType; import org.apache.parquet.schema.Type; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.sql.Date; -import java.sql.Timestamp; import java.util.List; import java.util.Map; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java index 12af94e337..d950991a4c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java @@ -18,8 +18,6 @@ package org.apache.hadoop.hive.ql.optimizer.calcite.translator; import java.math.BigDecimal; -import java.sql.Date; -import 
java.sql.Timestamp; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; @@ -45,9 +43,11 @@ import org.apache.calcite.util.DateString; import org.apache.calcite.util.TimeString; import org.apache.calcite.util.TimestampString; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.Hive; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java index 7a482d968f..6eb930871e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java @@ -19,14 +19,12 @@ import java.math.BigDecimal; import java.math.BigInteger; -import java.sql.Timestamp; import java.time.Instant; +import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Calendar; -import java.util.Date; import java.util.LinkedHashMap; import java.util.List; -import java.util.Locale; import java.util.Map; import org.apache.calcite.avatica.util.TimeUnit; @@ -53,12 +51,14 @@ import org.apache.calcite.util.DateString; import org.apache.calcite.util.NlsString; import org.apache.calcite.util.TimestampString; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; @@ -393,8 +393,6 @@ private RexNode handleExplicitCast(ExprNodeGenericFuncDesc func, List c GenericUDF udf = func.getGenericUDF(); if ((udf instanceof GenericUDFToChar) || (udf instanceof GenericUDFToVarchar) || (udf instanceof GenericUDFToDecimal) || (udf instanceof GenericUDFToDate) - // Calcite can not specify the scale for timestamp. 
As a result, all - // the millisecond part will be lost || (udf instanceof GenericUDFTimestamp) || (udf instanceof GenericUDFToTimestampLocalTZ) || (udf instanceof GenericUDFToBinary) || castExprUsingUDFBridge(udf)) { castExpr = cluster.getRexBuilder().makeAbstractCast( @@ -676,9 +674,9 @@ protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticEx calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value)); break; case DATE: - final Calendar cal = Calendar.getInstance(Locale.getDefault()); - cal.setTime((Date) value); - calciteLiteral = rexBuilder.makeDateLiteral(DateString.fromCalendarFields(cal)); + final Date date = (Date) value; + calciteLiteral = rexBuilder.makeDateLiteral( + DateString.fromDaysSinceEpoch(date.getDays())); break; case TIMESTAMP: final TimestampString tsString; @@ -686,9 +684,7 @@ protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticEx tsString = TimestampString.fromCalendarFields((Calendar) value); } else { final Timestamp ts = (Timestamp) value; - final Calendar calt = Calendar.getInstance(Locale.getDefault()); - calt.setTimeInMillis(ts.getTime()); - tsString = TimestampString.fromCalendarFields(calt).withNanos(ts.getNanos()); + tsString = TimestampString.fromMillisSinceEpoch(ts.getMillis()).withNanos(ts.getNanos()); } // Must call makeLiteral, not makeTimestampLiteral // to have the RexBuilder.roundTime logic kick in diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index d940cddaa1..3406f24467 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -21,7 +21,6 @@ import java.io.IOException; import java.io.Serializable; import java.io.UnsupportedEncodingException; -import java.sql.Date; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -43,6 +42,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -2077,7 +2077,7 @@ private static String normalizeDateCol( Object colValue, String originalColSpec) throws SemanticException { Date value; if (colValue instanceof DateWritable) { - value = ((DateWritable) colValue).get(false); // Time doesn't matter. + value = ((DateWritable) colValue).get(); // Time doesn't matter. 
} else if (colValue instanceof Date) { value = (Date) colValue; } else { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java index 3e7b3a1d6a..3f56eff280 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java @@ -19,8 +19,7 @@ package org.apache.hadoop.hive.ql.parse; import java.math.BigDecimal; -import java.sql.Date; -import java.sql.Timestamp; + import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -33,10 +32,12 @@ import org.apache.calcite.rel.RelNode; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.math.NumberUtils; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZUtil; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ErrorMsg; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java index 21164b7013..71f854b05c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java @@ -25,6 +25,7 @@ import java.util.Map; import java.util.NoSuchElementException; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable; @@ -59,13 +60,9 @@ public TimestampWritable evaluate(TimestampWritable t) { if (t == null) { return null; } - final long originalTimestamp = t.getTimestamp().getTime(); // default - final long originalTimestampUTC = new DateTime(originalTimestamp) - .withZoneRetainFields(DateTimeZone.UTC).getMillis(); // default -> utc - final long newTimestampUTC = granularity.truncate(originalTimestampUTC); // utc - final long newTimestamp = new DateTime(newTimestampUTC, DateTimeZone.UTC) - .withZoneRetainFields(DateTimeZone.getDefault()).getMillis(); // utc -> default - resultTS.setTime(newTimestamp); + final long originalTimestamp = t.getTimestamp().getMillis(); + final long newTimestamp = granularity.truncate(originalTimestamp); + resultTS.set(Timestamp.ofEpochMilli(newTimestamp)); return resultTS; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java index f7749547de..f63a726ade 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java @@ -18,23 +18,23 @@ package org.apache.hadoop.hive.ql.udf; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Calendar; -import java.util.Date; - +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.exec.Description; -import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import 
org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthDate; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthString; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthTimestamp; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; import org.apache.hadoop.hive.ql.udf.generic.NDV; -import org.apache.hadoop.hive.serde2.io.DateWritable; -import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; -import org.apache.hadoop.hive.serde2.io.TimestampWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Text; /** * UDFDayOfMonth. @@ -51,66 +51,78 @@ + " > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + " 30") @VectorizedExpressions({VectorUDFDayOfMonthDate.class, VectorUDFDayOfMonthString.class, VectorUDFDayOfMonthTimestamp.class}) @NDV(maxNdv = 31) -public class UDFDayOfMonth extends UDF { - private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); - private final Calendar calendar = Calendar.getInstance(); - - private final IntWritable result = new IntWritable(); +public class UDFDayOfMonth extends GenericUDF { - public UDFDayOfMonth() { - } + private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1]; + private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1]; + private final IntWritable output = new IntWritable(); - /** - * Get the day of month from a date string. - * - * @param dateString - * the dateString in the format of "yyyy-MM-dd HH:mm:ss" or - * "yyyy-MM-dd". - * @return an int from 1 to 31. null if the dateString is not a valid date - * string. 
- */ - public IntWritable evaluate(Text dateString) { - - if (dateString == null) { - return null; + @Override + public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { + checkArgsSize(arguments, 1, 1); + checkArgPrimitive(arguments, 0); + switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) { + case INTERVAL_DAY_TIME: + inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME; + converters[0] = ObjectInspectorConverters.getConverter( + arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector); + break; + case STRING: + case CHAR: + case VARCHAR: + case DATE: + case TIMESTAMP: + case TIMESTAMPLOCALTZ: + case VOID: + obtainDateConverter(arguments, 0, inputTypes, converters); + break; + default: + // build error message + StringBuilder sb = new StringBuilder(); + sb.append(getFuncName()); + sb.append(" does not take "); + sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()); + sb.append(" type"); + throw new UDFArgumentTypeException(0, sb.toString()); } - try { - Date date = formatter.parse(dateString.toString()); - calendar.setTime(date); - result.set(calendar.get(Calendar.DAY_OF_MONTH)); - return result; - } catch (ParseException e) { - return null; - } + ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector; + return outputOI; } - public IntWritable evaluate(DateWritable d) { - if (d == null) { - return null; + @Override + public Object evaluate(DeferredObject[] arguments) throws HiveException { + switch (inputTypes[0]) { + case INTERVAL_DAY_TIME: + HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters); + if (intervalDayTime == null) { + return null; + } + output.set(intervalDayTime.getDays()); + break; + case STRING: + case CHAR: + case VARCHAR: + case DATE: + case TIMESTAMP: + case TIMESTAMPLOCALTZ: + case VOID: + Date date = getDateValue(arguments, 0, inputTypes, converters); + if (date == null) { + return null; + } + output.set(date.getLocalDate().getDayOfMonth()); } - - calendar.setTime(d.get(false)); // Time doesn't matter. 
- result.set(calendar.get(Calendar.DAY_OF_MONTH)); - return result; + return output; } - public IntWritable evaluate(TimestampWritable t) { - if (t == null) { - return null; - } - - calendar.setTime(t.getTimestamp()); - result.set(calendar.get(Calendar.DAY_OF_MONTH)); - return result; + @Override + protected String getFuncName() { + return "day"; } - public IntWritable evaluate(HiveIntervalDayTimeWritable i) { - if (i == null) { - return null; - } - - result.set(i.getHiveIntervalDayTime().getDays()); - return result; + @Override + public String getDisplayString(String[] children) { + return getStandardDisplayString(getFuncName(), children); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java index 88e6d9466b..3bfba6422c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java @@ -19,10 +19,8 @@ package org.apache.hadoop.hive.ql.udf; import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Calendar; -import java.util.Date; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; @@ -51,8 +49,6 @@ @VectorizedExpressions({VectorUDFDayOfWeekDate.class, VectorUDFDayOfWeekString.class, VectorUDFDayOfWeekTimestamp.class}) @NDV(maxNdv = 7) public class UDFDayOfWeek extends UDF { - private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); - private final Calendar calendar = Calendar.getInstance(); private final IntWritable result = new IntWritable(); @@ -73,11 +69,10 @@ public IntWritable evaluate(Text dateString) { return null; } try { - Date date = formatter.parse(dateString.toString()); - calendar.setTime(date); - result.set(calendar.get(Calendar.DAY_OF_WEEK)); + Date date = Date.valueOf(dateString.toString()); + result.set(date.getLocalDate().getDayOfWeek().getValue()); return result; - } catch (ParseException e) { + } catch (IllegalArgumentException e) { return null; } } @@ -87,8 +82,7 @@ public IntWritable evaluate(DateWritable d) { return null; } - calendar.setTime(d.get(false)); // Time doesn't matter. 
- result.set(calendar.get(Calendar.DAY_OF_WEEK)); + result.set(d.get().getLocalDate().getDayOfWeek().getValue()); return result; } @@ -97,8 +91,7 @@ public IntWritable evaluate(TimestampWritable t) { return null; } - calendar.setTime(t.getTimestamp()); - result.set(calendar.get(Calendar.DAY_OF_WEEK)); + result.set(t.getTimestamp().getLocalDateTime().getDayOfWeek().plus(1).getValue()); return result; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java index a0c4e96a40..9a02773cfe 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java @@ -18,22 +18,23 @@ package org.apache.hadoop.hive.ql.udf; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Calendar; -import java.util.Date; - +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; -import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourDate; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourString; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourTimestamp; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; import org.apache.hadoop.hive.ql.udf.generic.NDV; -import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; -import org.apache.hadoop.hive.serde2.io.TimestampWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Text; /** * UDFHour. @@ -51,62 +52,78 @@ + " > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + " 12") @VectorizedExpressions({VectorUDFHourDate.class, VectorUDFHourString.class, VectorUDFHourTimestamp.class}) @NDV(maxNdv = 24) -public class UDFHour extends UDF { - private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss"); - private final Calendar calendar = Calendar.getInstance(); - - private final IntWritable result = new IntWritable(); - - public UDFHour() { - } +public class UDFHour extends GenericUDF { - /** - * Get the hour from a date string. - * - * @param dateString - * the dateString in the format of "yyyy-MM-dd HH:mm:ss" or - * "yyyy-MM-dd". - * @return an int from 0 to 23. null if the dateString is not a valid date - * string. 
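The hour(), minute() and second() conversions below all resolve their argument to the new Hive Timestamp and read the field from its LocalDateTime view. A minimal sketch of those accessors, assuming the LocalDateTime constructor this patch uses elsewhere; the class name is illustrative only.

import java.time.LocalDateTime;
import org.apache.hadoop.hive.common.type.Timestamp;

public class TimePartSketch {
  public static void main(String[] args) {
    Timestamp ts = new Timestamp(LocalDateTime.of(2009, 7, 30, 12, 58, 59));
    System.out.println(ts.getLocalDateTime().getHour());   // 12
    System.out.println(ts.getLocalDateTime().getMinute()); // 58
    System.out.println(ts.getLocalDateTime().getSecond()); // 59
  }
}
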
- */ - public IntWritable evaluate(Text dateString) { + private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1]; + private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1]; + private final IntWritable output = new IntWritable(); - if (dateString == null) { - return null; + @Override + public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { + checkArgsSize(arguments, 1, 1); + checkArgPrimitive(arguments, 0); + switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) { + case INTERVAL_DAY_TIME: + inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME; + converters[0] = ObjectInspectorConverters.getConverter( + arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector); + break; + case STRING: + case CHAR: + case VARCHAR: + case DATE: + case TIMESTAMP: + case TIMESTAMPLOCALTZ: + case VOID: + obtainTimestampConverter(arguments, 0, inputTypes, converters); + break; + default: + // build error message + StringBuilder sb = new StringBuilder(); + sb.append(getFuncName()); + sb.append(" does not take "); + sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()); + sb.append(" type"); + throw new UDFArgumentTypeException(0, sb.toString()); } - try { - Date date = null; - try { - date = formatter1.parse(dateString.toString()); - } catch (ParseException e) { - date = formatter2.parse(dateString.toString()); - } - calendar.setTime(date); - result.set(calendar.get(Calendar.HOUR_OF_DAY)); - return result; - } catch (ParseException e) { - return null; - } + ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector; + return outputOI; } - public IntWritable evaluate(TimestampWritable t) { - if (t == null) { - return null; + @Override + public Object evaluate(DeferredObject[] arguments) throws HiveException { + switch (inputTypes[0]) { + case INTERVAL_DAY_TIME: + HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters); + if (intervalDayTime == null) { + return null; + } + output.set(intervalDayTime.getHours()); + break; + case STRING: + case CHAR: + case VARCHAR: + case DATE: + case TIMESTAMP: + case TIMESTAMPLOCALTZ: + case VOID: + Timestamp ts = getTimestampValue(arguments, 0, converters); + if (ts == null) { + return null; + } + output.set(ts.getLocalDateTime().getHour()); } - - calendar.setTime(t.getTimestamp()); - result.set(calendar.get(Calendar.HOUR_OF_DAY)); - return result; + return output; } - public IntWritable evaluate(HiveIntervalDayTimeWritable i) { - if (i == null) { - return null; - } + @Override + protected String getFuncName() { + return "hour"; + } - result.set(i.getHiveIntervalDayTime().getHours()); - return result; + @Override + public String getDisplayString(String[] children) { + return getStandardDisplayString(getFuncName(), children); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java index 306d45816c..057f3edc40 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java @@ -18,22 +18,23 @@ package org.apache.hadoop.hive.ql.udf; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Calendar; -import java.util.Date; - +import 
org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; -import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteDate; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteString; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteTimestamp; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; import org.apache.hadoop.hive.ql.udf.generic.NDV; -import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; -import org.apache.hadoop.hive.serde2.io.TimestampWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Text; /** * UDFMinute. @@ -51,62 +52,78 @@ + " > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + " 58") @VectorizedExpressions({VectorUDFMinuteDate.class, VectorUDFMinuteString.class, VectorUDFMinuteTimestamp.class}) @NDV(maxNdv = 60) -public class UDFMinute extends UDF { - private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss"); - private final Calendar calendar = Calendar.getInstance(); - - private final IntWritable result = new IntWritable(); - - public UDFMinute() { - } +public class UDFMinute extends GenericUDF { - /** - * Get the minute from a date string. - * - * @param dateString - * the dateString in the format of "yyyy-MM-dd HH:mm:ss" or - * "yyyy-MM-dd". - * @return an int from 0 to 59. null if the dateString is not a valid date - * string. 
- */ - public IntWritable evaluate(Text dateString) { + private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1]; + private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1]; + private final IntWritable output = new IntWritable(); - if (dateString == null) { - return null; + @Override + public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { + checkArgsSize(arguments, 1, 1); + checkArgPrimitive(arguments, 0); + switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) { + case INTERVAL_DAY_TIME: + inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME; + converters[0] = ObjectInspectorConverters.getConverter( + arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector); + break; + case STRING: + case CHAR: + case VARCHAR: + case DATE: + case TIMESTAMP: + case TIMESTAMPLOCALTZ: + case VOID: + obtainTimestampConverter(arguments, 0, inputTypes, converters); + break; + default: + // build error message + StringBuilder sb = new StringBuilder(); + sb.append(getFuncName()); + sb.append(" does not take "); + sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()); + sb.append(" type"); + throw new UDFArgumentTypeException(0, sb.toString()); } - try { - Date date = null; - try { - date = formatter1.parse(dateString.toString()); - } catch (ParseException e) { - date = formatter2.parse(dateString.toString()); - } - calendar.setTime(date); - result.set(calendar.get(Calendar.MINUTE)); - return result; - } catch (ParseException e) { - return null; - } + ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector; + return outputOI; } - public IntWritable evaluate(TimestampWritable t) { - if (t == null) { - return null; + @Override + public Object evaluate(GenericUDF.DeferredObject[] arguments) throws HiveException { + switch (inputTypes[0]) { + case INTERVAL_DAY_TIME: + HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters); + if (intervalDayTime == null) { + return null; + } + output.set(intervalDayTime.getMinutes()); + break; + case STRING: + case CHAR: + case VARCHAR: + case DATE: + case TIMESTAMP: + case TIMESTAMPLOCALTZ: + case VOID: + Timestamp ts = getTimestampValue(arguments, 0, converters); + if (ts == null) { + return null; + } + output.set(ts.getLocalDateTime().getMinute()); } - - calendar.setTime(t.getTimestamp()); - result.set(calendar.get(Calendar.MINUTE)); - return result; + return output; } - public IntWritable evaluate(HiveIntervalDayTimeWritable i) { - if (i == null) { - return null; - } + @Override + protected String getFuncName() { + return "minute"; + } - result.set(i.getHiveIntervalDayTime().getMinutes()); - return result; + @Override + public String getDisplayString(String[] children) { + return getStandardDisplayString(getFuncName(), children); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java index 7995934c1f..4b6f0a5c76 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java @@ -18,23 +18,23 @@ package org.apache.hadoop.hive.ql.udf; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Calendar; -import java.util.Date; - +import org.apache.hadoop.hive.common.type.Date; +import 
org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.Description; -import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthDate; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthString; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthTimestamp; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; import org.apache.hadoop.hive.ql.udf.generic.NDV; -import org.apache.hadoop.hive.serde2.io.DateWritable; -import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; -import org.apache.hadoop.hive.serde2.io.TimestampWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Text; /** * UDFMonth. @@ -51,64 +51,78 @@ + " > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + " 7") @VectorizedExpressions({VectorUDFMonthDate.class, VectorUDFMonthString.class, VectorUDFMonthTimestamp.class}) @NDV(maxNdv = 31) -public class UDFMonth extends UDF { - private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); - private final Calendar calendar = Calendar.getInstance(); - - private final IntWritable result = new IntWritable(); +public class UDFMonth extends GenericUDF { - public UDFMonth() { - } - - /** - * Get the month from a date string. - * - * @param dateString - * the dateString in the format of "yyyy-MM-dd HH:mm:ss" or - * "yyyy-MM-dd". - * @return an int from 1 to 12. null if the dateString is not a valid date - * string. 
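Calendar.MONTH is zero-based, which is why the removed overloads below add 1; LocalDate.getMonthValue() is already one-based, so the rewritten month() reads it directly. A standalone sketch of the two conventions (illustrative only, not part of the patch):

import java.time.LocalDate;
import java.util.Calendar;
import java.util.GregorianCalendar;

public class MonthNumberingSketch {
  public static void main(String[] args) {
    Calendar cal = new GregorianCalendar(2009, Calendar.JULY, 30);
    System.out.println(1 + cal.get(Calendar.MONTH));               // 7 (Calendar.MONTH is 0-based)
    System.out.println(LocalDate.of(2009, 7, 30).getMonthValue()); // 7 (already 1-based)
  }
}
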
- */ - public IntWritable evaluate(Text dateString) { - if (dateString == null) { - return null; - } - try { - Date date = formatter.parse(dateString.toString()); - calendar.setTime(date); - result.set(1 + calendar.get(Calendar.MONTH)); - return result; - } catch (ParseException e) { - return null; - } - } + private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1]; + private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1]; + private final IntWritable output = new IntWritable(); - public IntWritable evaluate(DateWritable d) { - if (d == null) { - return null; + @Override + public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { + checkArgsSize(arguments, 1, 1); + checkArgPrimitive(arguments, 0); + switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) { + case INTERVAL_YEAR_MONTH: + inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_YEAR_MONTH; + converters[0] = ObjectInspectorConverters.getConverter( + arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector); + break; + case STRING: + case CHAR: + case VARCHAR: + case DATE: + case TIMESTAMP: + case TIMESTAMPLOCALTZ: + case VOID: + obtainDateConverter(arguments, 0, inputTypes, converters); + break; + default: + // build error message + StringBuilder sb = new StringBuilder(); + sb.append(getFuncName()); + sb.append(" does not take "); + sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()); + sb.append(" type"); + throw new UDFArgumentTypeException(0, sb.toString()); } - calendar.setTime(d.get(false)); // Time doesn't matter. - result.set(1 + calendar.get(Calendar.MONTH)); - return result; + ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector; + return outputOI; } - public IntWritable evaluate(TimestampWritable t) { - if (t == null) { - return null; + @Override + public Object evaluate(DeferredObject[] arguments) throws HiveException { + switch (inputTypes[0]) { + case INTERVAL_YEAR_MONTH: + HiveIntervalYearMonth intervalYearMonth = getIntervalYearMonthValue(arguments, 0, inputTypes, converters); + if (intervalYearMonth == null) { + return null; + } + output.set(intervalYearMonth.getMonths()); + break; + case STRING: + case CHAR: + case VARCHAR: + case DATE: + case TIMESTAMP: + case TIMESTAMPLOCALTZ: + case VOID: + Date date = getDateValue(arguments, 0, inputTypes, converters); + if (date == null) { + return null; + } + output.set(date.getLocalDate().getMonthValue()); } - - calendar.setTime(t.getTimestamp()); - result.set(1 + calendar.get(Calendar.MONTH)); - return result; + return output; } - public IntWritable evaluate(HiveIntervalYearMonthWritable i) { - if (i == null) { - return null; - } + @Override + protected String getFuncName() { + return "month"; + } - result.set(i.getHiveIntervalYearMonth().getMonths()); - return result; + @Override + public String getDisplayString(String[] children) { + return getStandardDisplayString(getFuncName(), children); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java index 5bf8b246af..13d08f5123 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java @@ -18,25 +18,24 @@ package org.apache.hadoop.hive.ql.udf; -import java.text.ParseException; -import 
java.text.SimpleDateFormat; -import java.util.Calendar; -import java.util.Date; - import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; -import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondDate; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondString; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondTimestamp; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; import org.apache.hadoop.hive.ql.udf.generic.NDV; -import org.apache.hadoop.hive.serde2.io.DoubleWritable; -import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; -import org.apache.hadoop.hive.serde2.io.TimestampWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Text; -import org.apache.hive.common.util.DateUtils; + /** * UDFSecond. @@ -54,64 +53,78 @@ + " > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + " 59") @VectorizedExpressions({VectorUDFSecondDate.class, VectorUDFSecondString.class, VectorUDFSecondTimestamp.class}) @NDV(maxNdv = 60) -public class UDFSecond extends UDF { - private final SimpleDateFormat formatter1 = new SimpleDateFormat( - "yyyy-MM-dd HH:mm:ss"); - private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss"); - private final Calendar calendar = Calendar.getInstance(); - - private final IntWritable result = new IntWritable(); - - public UDFSecond() { - } +public class UDFSecond extends GenericUDF { - /** - * Get the minute from a date string. - * - * @param dateString - * the dateString in the format of "yyyy-MM-dd HH:mm:ss" or - * "yyyy-MM-dd". - * @return an int from 0 to 59. null if the dateString is not a valid date - * string. 
- */ - public IntWritable evaluate(Text dateString) { + private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1]; + private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1]; + private final IntWritable output = new IntWritable(); - if (dateString == null) { - return null; + @Override + public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { + checkArgsSize(arguments, 1, 1); + checkArgPrimitive(arguments, 0); + switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) { + case INTERVAL_DAY_TIME: + inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME; + converters[0] = ObjectInspectorConverters.getConverter( + arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector); + break; + case STRING: + case CHAR: + case VARCHAR: + case DATE: + case TIMESTAMP: + case TIMESTAMPLOCALTZ: + case VOID: + obtainTimestampConverter(arguments, 0, inputTypes, converters); + break; + default: + // build error message + StringBuilder sb = new StringBuilder(); + sb.append(getFuncName()); + sb.append(" does not take "); + sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()); + sb.append(" type"); + throw new UDFArgumentTypeException(0, sb.toString()); } - try { - Date date = null; - try { - date = formatter1.parse(dateString.toString()); - } catch (ParseException e) { - date = formatter2.parse(dateString.toString()); - } - calendar.setTime(date); - result.set(calendar.get(Calendar.SECOND)); - return result; - } catch (ParseException e) { - return null; - } + ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector; + return outputOI; } - public IntWritable evaluate(TimestampWritable t) { - if (t == null) { - return null; + @Override + public Object evaluate(GenericUDF.DeferredObject[] arguments) throws HiveException { + switch (inputTypes[0]) { + case INTERVAL_DAY_TIME: + HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters); + if (intervalDayTime == null) { + return null; + } + output.set(intervalDayTime.getSeconds()); + break; + case STRING: + case CHAR: + case VARCHAR: + case DATE: + case TIMESTAMP: + case TIMESTAMPLOCALTZ: + case VOID: + Timestamp ts = getTimestampValue(arguments, 0, converters); + if (ts == null) { + return null; + } + output.set(ts.getLocalDateTime().getSecond()); } - - calendar.setTime(t.getTimestamp()); - result.set(calendar.get(Calendar.SECOND)); - return result; + return output; } - public IntWritable evaluate(HiveIntervalDayTimeWritable i) { - if (i == null) { - return null; - } + @Override + protected String getFuncName() { + return "second"; + } - HiveIntervalDayTime idt = i.getHiveIntervalDayTime(); - result.set(idt.getSeconds()); - return result; + @Override + public String getDisplayString(String[] children) { + return getStandardDisplayString(getFuncName(), children); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java index 18ed52dc1f..9999a2fdc1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java @@ -18,11 +18,12 @@ package org.apache.hadoop.hive.ql.udf; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Calendar; -import java.util.Date; 
+import java.time.DayOfWeek; +import java.time.temporal.IsoFields; +import java.time.temporal.WeekFields; +import java.util.Locale; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; @@ -49,14 +50,13 @@ @VectorizedExpressions({VectorUDFWeekOfYearDate.class, VectorUDFWeekOfYearString.class, VectorUDFWeekOfYearTimestamp.class}) @NDV(maxNdv = 52) public class UDFWeekOfYear extends UDF { - private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); - private final Calendar calendar = Calendar.getInstance(); + + private final WeekFields weekFields; private final IntWritable result = new IntWritable(); public UDFWeekOfYear() { - calendar.setFirstDayOfWeek(Calendar.MONDAY); - calendar.setMinimalDaysInFirstWeek(4); + weekFields = WeekFields.of(DayOfWeek.MONDAY, 4); } /** @@ -73,11 +73,10 @@ public IntWritable evaluate(Text dateString) { return null; } try { - Date date = formatter.parse(dateString.toString()); - calendar.setTime(date); - result.set(calendar.get(Calendar.WEEK_OF_YEAR)); + Date date = Date.valueOf(dateString.toString()); + result.set(date.getLocalDate().get(weekFields.weekOfWeekBasedYear())); return result; - } catch (ParseException e) { + } catch (IllegalArgumentException e) { return null; } } @@ -87,8 +86,7 @@ public IntWritable evaluate(DateWritable d) { return null; } - calendar.setTime(d.get(false)); // Time doesn't matter. - result.set(calendar.get(Calendar.WEEK_OF_YEAR)); + result.set(d.get().getLocalDate().get(weekFields.weekOfWeekBasedYear())); return result; } @@ -97,8 +95,7 @@ public IntWritable evaluate(TimestampWritable t) { return null; } - calendar.setTime(t.getTimestamp()); - result.set(calendar.get(Calendar.WEEK_OF_YEAR)); + result.set(t.getTimestamp().getLocalDateTime().get(IsoFields.WEEK_OF_WEEK_BASED_YEAR)); return result; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java index 84175913f3..1873b855ed 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java @@ -18,23 +18,23 @@ package org.apache.hadoop.hive.ql.udf; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Calendar; -import java.util.Date; - +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.Description; -import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearDate; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearString; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearTimestamp; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; import org.apache.hadoop.hive.ql.udf.generic.NDV; -import org.apache.hadoop.hive.serde2.io.DateWritable; -import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; -import org.apache.hadoop.hive.serde2.io.TimestampWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import 
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Text; /** * UDFYear. @@ -51,66 +51,78 @@ + " > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + " 2009") @VectorizedExpressions({VectorUDFYearDate.class, VectorUDFYearString.class, VectorUDFYearTimestamp.class}) @NDV(maxNdv = 20) // although technically its unbounded, its unlikely we will ever see ndv > 20 -public class UDFYear extends UDF { - private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); - private final Calendar calendar = Calendar.getInstance(); - - private final IntWritable result = new IntWritable(); +public class UDFYear extends GenericUDF { - public UDFYear() { - } + private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1]; + private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1]; + private final IntWritable output = new IntWritable(); - /** - * Get the year from a date string. - * - * @param dateString - * the dateString in the format of "yyyy-MM-dd HH:mm:ss" or - * "yyyy-MM-dd". - * @return an int from 1 to 12. null if the dateString is not a valid date - * string. - */ - public IntWritable evaluate(Text dateString) { - - if (dateString == null) { - return null; + @Override + public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { + checkArgsSize(arguments, 1, 1); + checkArgPrimitive(arguments, 0); + switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) { + case INTERVAL_YEAR_MONTH: + inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_YEAR_MONTH; + converters[0] = ObjectInspectorConverters.getConverter( + arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector); + break; + case STRING: + case CHAR: + case VARCHAR: + case DATE: + case TIMESTAMP: + case TIMESTAMPLOCALTZ: + case VOID: + obtainDateConverter(arguments, 0, inputTypes, converters); + break; + default: + // build error message + StringBuilder sb = new StringBuilder(); + sb.append(getFuncName()); + sb.append(" does not take "); + sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()); + sb.append(" type"); + throw new UDFArgumentTypeException(0, sb.toString()); } - try { - Date date = formatter.parse(dateString.toString()); - calendar.setTime(date); - result.set(calendar.get(Calendar.YEAR)); - return result; - } catch (ParseException e) { - return null; - } + ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector; + return outputOI; } - public IntWritable evaluate(DateWritable d) { - if (d == null) { - return null; + @Override + public Object evaluate(DeferredObject[] arguments) throws HiveException { + switch (inputTypes[0]) { + case INTERVAL_YEAR_MONTH: + HiveIntervalYearMonth intervalYearMonth = getIntervalYearMonthValue(arguments, 0, inputTypes, converters); + if (intervalYearMonth == null) { + return null; + } + output.set(intervalYearMonth.getYears()); + break; + case STRING: + case CHAR: + case VARCHAR: + case DATE: + case TIMESTAMP: + case TIMESTAMPLOCALTZ: + case VOID: + Date date = getDateValue(arguments, 0, inputTypes, converters); + if (date == null) { + return null; + } + 
output.set(date.getLocalDate().getYear()); } - - calendar.setTime(d.get(false)); // Time doesn't matter. - result.set(calendar.get(Calendar.YEAR)); - return result; + return output; } - public IntWritable evaluate(TimestampWritable t) { - if (t == null) { - return null; - } - - calendar.setTime(t.getTimestamp()); - result.set(calendar.get(Calendar.YEAR)); - return result; + @Override + protected String getFuncName() { + return "year"; } - public IntWritable evaluate(HiveIntervalYearMonthWritable i) { - if (i == null) { - return null; - } - - result.set(i.getHiveIntervalYearMonth().getYears()); - return result; + @Override + public String getDisplayString(String[] children) { + return getStandardDisplayString(getFuncName(), children); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java index 5c67242850..cda0580f01 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java @@ -21,6 +21,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; @@ -34,8 +35,6 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; -import java.sql.Date; - public abstract class BaseMaskUDF extends GenericUDF { private static final Log LOG = LogFactory.getLog(BaseMaskUDF.class); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java index ca8bc8f42e..f2b9083258 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hive.common.io.NonSyncByteArrayInputStream; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.SelectOperator; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -46,7 +47,6 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.sql.Timestamp; import java.util.List; /** @@ -204,7 +204,7 @@ public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveExcep case TIMESTAMP: Timestamp vTimeStamp = ((TimestampObjectInspector)inputOI). getPrimitiveJavaObject(parameters[0]); - bf.addLong(vTimeStamp.getTime()); + bf.addLong(vTimeStamp.getMillis()); break; case CHAR: Text vChar = ((HiveCharObjectInspector)inputOI). 
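The GenericUDF.java hunk that follows adds the getIntervalYearMonthValue/getIntervalDayTimeValue helpers that the converted day(), hour(), minute(), second(), month() and year() UDFs above depend on. A small sketch of the interval types those helpers return, assuming the usual 'd hh:mm:ss' and 'y-m' literal forms are accepted by valueOf(); the class name is illustrative only.

import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;

public class IntervalValueSketch {
  public static void main(String[] args) {
    HiveIntervalDayTime dt = HiveIntervalDayTime.valueOf("2 10:30:45");
    System.out.println(dt.getDays());    // 2
    System.out.println(dt.getHours());   // 10
    System.out.println(dt.getMinutes()); // 30
    System.out.println(dt.getSeconds()); // 45

    HiveIntervalYearMonth ym = HiveIntervalYearMonth.valueOf("1-6");
    System.out.println(ym.getYears());   // 1
    System.out.println(ym.getMonths());  // 6
  }
}
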
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java index 710f0e88e1..beb00ed3c1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java @@ -20,12 +20,14 @@ import java.io.Closeable; import java.io.IOException; -import java.sql.Timestamp; import java.text.ParseException; -import java.util.Date; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.MapredContext; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; @@ -36,6 +38,8 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector; @@ -45,7 +49,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping; @@ -489,7 +492,7 @@ protected Double getDoubleValue(DeferredObject[] arguments, int i, Converter[] c } protected Date getDateValue(DeferredObject[] arguments, int i, PrimitiveCategory[] inputTypes, - Converter[] converters) throws HiveException { + Converter[] converters) throws HiveException { Object obj; if ((obj = arguments[i].get()) == null) { return null; @@ -501,11 +504,7 @@ protected Date getDateValue(DeferredObject[] arguments, int i, PrimitiveCategory case VARCHAR: case CHAR: String dateStr = converters[i].convert(obj).toString(); - try { - date = DateUtils.getDateFormat().parse(dateStr); - } catch (ParseException e) { - throw new UDFArgumentException("Unparsable date: " + dateStr); - } + date = Date.valueOf(dateStr); break; case TIMESTAMP: case DATE: @@ -535,6 +534,58 @@ protected Timestamp getTimestampValue(DeferredObject[] arguments, int i, Convert return ts; } + protected HiveIntervalYearMonth getIntervalYearMonthValue(DeferredObject[] arguments, int i, PrimitiveCategory[] inputTypes, + Converter[] converters) throws HiveException { + Object obj; + if ((obj = arguments[i].get()) == null) { + return null; + } + + HiveIntervalYearMonth intervalYearMonth; + switch (inputTypes[i]) { + case STRING: + case VARCHAR: + case CHAR: + String intervalYearMonthStr = converters[i].convert(obj).toString(); + intervalYearMonth = 
HiveIntervalYearMonth.valueOf(intervalYearMonthStr); + break; + case INTERVAL_YEAR_MONTH: + Object writableValue = converters[i].convert(obj); + intervalYearMonth = ((HiveIntervalYearMonthWritable) writableValue).getHiveIntervalYearMonth(); + break; + default: + throw new UDFArgumentTypeException(0, getFuncName() + + " only takes INTERVAL_YEAR_MONTH and STRING_GROUP types, got " + inputTypes[i]); + } + return intervalYearMonth; + } + + protected HiveIntervalDayTime getIntervalDayTimeValue(DeferredObject[] arguments, int i, PrimitiveCategory[] inputTypes, + Converter[] converters) throws HiveException { + Object obj; + if ((obj = arguments[i].get()) == null) { + return null; + } + + HiveIntervalDayTime intervalDayTime; + switch (inputTypes[i]) { + case STRING: + case VARCHAR: + case CHAR: + String intervalDayTimeStr = converters[i].convert(obj).toString(); + intervalDayTime = HiveIntervalDayTime.valueOf(intervalDayTimeStr); + break; + case INTERVAL_DAY_TIME: + Object writableValue = converters[i].convert(obj); + intervalDayTime = ((HiveIntervalDayTimeWritable) writableValue).getHiveIntervalDayTime(); + break; + default: + throw new UDFArgumentTypeException(0, getFuncName() + + " only takes INTERVAL_DAY_TIME and STRING_GROUP types, got " + inputTypes[i]); + } + return intervalDayTime; + } + protected String getConstantStringValue(ObjectInspector[] arguments, int i) { Object constValue = ((ConstantObjectInspector) arguments[i]).getWritableConstantValue(); String str = constValue == null ? null : constValue.toString(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java index dae4b97b4a..417602a4cb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java @@ -22,9 +22,7 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.VOID_GROUP; -import java.util.Calendar; -import java.util.Date; - +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -34,7 +32,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; -import org.apache.hive.common.util.DateUtils; /** * GenericUDFAddMonths. 
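For context on the addMonth() rewrite that follows: add_months keeps the "last day of month" behavior, so an input that is the last day of its month maps to the last day of the target month. A java.time-only sketch of that rule (illustrative only, not the patch code, which mutates a shared Date field):

import java.time.LocalDate;

public class AddMonthsSketch {
  public static void main(String[] args) {
    LocalDate d = LocalDate.of(2018, 2, 28);          // last day of February
    LocalDate shifted = d.plusMonths(1);
    if (d.getDayOfMonth() == d.lengthOfMonth()) {
      shifted = shifted.withDayOfMonth(shifted.lengthOfMonth());
    }
    System.out.println(shifted);                      // 2018-03-31, not 2018-03-28
  }
}
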
@@ -53,7 +50,7 @@ public class GenericUDFAddMonths extends GenericUDF { private transient Converter[] converters = new Converter[2]; private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[2]; - private final Calendar calendar = Calendar.getInstance(); + private final Date date = new Date(); private final Text output = new Text(); private transient Integer numMonthsConst; private transient boolean isNumMonthsConst; @@ -94,14 +91,13 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { } int numMonthInt = numMonthV.intValue(); - Date date = getDateValue(arguments, 0, inputTypes, converters); - if (date == null) { + Date d = getDateValue(arguments, 0, inputTypes, converters); + if (d == null) { return null; } - addMonth(date, numMonthInt); - Date newDate = calendar.getTime(); - output.set(DateUtils.getDateFormat().format(newDate)); + addMonth(d, numMonthInt); + output.set(date.toString()); return output; } @@ -115,23 +111,23 @@ protected String getFuncName() { return "add_months"; } - protected Calendar addMonth(Date d, int numMonths) { - calendar.setTime(d); + protected Date addMonth(Date d, int numMonths) { + date.setTimeInDays(d.getDays()); - boolean lastDatOfMonth = isLastDayOfMonth(calendar); + boolean lastDatOfMonth = isLastDayOfMonth(date); - calendar.add(Calendar.MONTH, numMonths); + date.setMonth(date.getLocalDate().getMonthValue() + numMonths); if (lastDatOfMonth) { - int maxDd = calendar.getActualMaximum(Calendar.DAY_OF_MONTH); - calendar.set(Calendar.DAY_OF_MONTH, maxDd); + int maxDd = date.getLocalDate().lengthOfMonth(); + date.setDayOfMonth(maxDd); } - return calendar; + return date; } - protected boolean isLastDayOfMonth(Calendar cal) { - int maxDd = cal.getActualMaximum(Calendar.DAY_OF_MONTH); - int dd = cal.get(Calendar.DAY_OF_MONTH); + protected boolean isLastDayOfMonth(Date d) { + int maxDd = d.getLocalDate().lengthOfMonth(); + int dd = d.getLocalDate().getDayOfMonth(); return dd == maxDd; } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java index 7d3c3f46aa..d9cf75c370 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; - +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java index 9da51c84f5..741aaffb6a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -48,7 +49,9 @@ public ObjectInspector initialize(ObjectInspector[] arguments) } if (currentTimestamp == null) { - currentTimestamp = new 
TimestampWritable(SessionState.get().getQueryCurrentTimestamp()); + java.sql.Timestamp ts = SessionState.get().getQueryCurrentTimestamp(); + currentTimestamp = new TimestampWritable( + Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos())); } return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java index b73893d0bc..62b7fff2d0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java @@ -17,9 +17,8 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -58,7 +57,7 @@ private transient PrimitiveObjectInspector argumentOI; private transient DateParser dateParser = new DateParser(); private transient final DateWritable output = new DateWritable(); - private transient final Date date = new Date(0); + private transient final Date date = new Date(); @Override public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { @@ -119,7 +118,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { case TIMESTAMP: Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get())) .getTimestamp(); - output.set(DateWritable.millisToDays(ts.getTime())); + output.set(DateWritable.millisToDays(ts.getMillis())); break; case TIMESTAMPLOCALTZ: case DATE: diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java index 8ba103beb6..bfbecb74b4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java @@ -17,9 +17,8 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -65,7 +64,7 @@ @VectorizedExpressions({VectorUDFDateAddColScalar.class, VectorUDFDateAddScalarCol.class, VectorUDFDateAddColCol.class}) public class GenericUDFDateAdd extends GenericUDF { private transient final DateParser dateParser = new DateParser(); - private transient final Date dateVal = new Date(0); + private transient final Date dateVal = new Date(); private transient Converter dateConverter; private transient Converter daysConverter; private transient PrimitiveCategory inputType1; @@ -176,7 +175,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { case TIMESTAMP: Timestamp ts = ((TimestampWritable) dateConverter.convert(arguments[0].get())) .getTimestamp(); - output.set(DateWritable.millisToDays(ts.getTime())); + output.set(DateWritable.millisToDays(ts.getMillis())); break; case DATE: DateWritable dw = (DateWritable) dateConverter.convert(arguments[0].get()); diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java index e9cbcf7459..a1f671a9a3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java @@ -17,11 +17,8 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Timestamp; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.sql.Date; - +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; @@ -43,6 +40,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; import org.apache.hadoop.io.IntWritable; import javax.annotation.Nullable; @@ -65,7 +63,6 @@ + " 1") @VectorizedExpressions({VectorUDFDateDiffColScalar.class, VectorUDFDateDiffColCol.class, VectorUDFDateDiffScalarCol.class}) public class GenericUDFDateDiff extends GenericUDF { - private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); private transient Converter inputConverter1; private transient Converter inputConverter2; private IntWritable output = new IntWritable(); @@ -116,21 +113,25 @@ private Date convertToDate(PrimitiveCategory inputType, Converter converter, Def case CHAR: String dateString = converter.convert(argument.get()).toString(); try { - return new Date(formatter.parse(dateString).getTime()); - } catch (ParseException e) { + return Date.valueOf(dateString); + } catch (IllegalArgumentException e) { + Timestamp ts = PrimitiveObjectInspectorUtils.getTimestampFromString(dateString); + if (ts != null) { + return Date.ofEpochMilli(ts.getMillis()); + } return null; } case TIMESTAMP: Timestamp ts = ((TimestampWritable) converter.convert(argument.get())) .getTimestamp(); - return new Date(ts.getTime()); + return Date.ofEpochMilli(ts.getMillis()); case DATE: DateWritable dw = (DateWritable) converter.convert(argument.get()); return dw.get(); case TIMESTAMPLOCALTZ: TimestampTZ tsz = ((TimestampLocalTZWritable) converter.convert(argument.get())) .getTimestampTZ(); - return new Date(tsz.getEpochSecond() * 1000l); + return Date.ofEpochMilli(tsz.getEpochSecond() * 1000l); default: throw new UDFArgumentException( "TO_DATE() only takes STRING/TIMESTAMP/TIMESTAMPLOCALTZ types, got " + inputType); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java index 6b775d6595..fc3cc45de0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java @@ -21,8 +21,9 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP; import java.text.SimpleDateFormat; -import java.util.Date; +import org.apache.hadoop.hive.common.type.Date; +import 
org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -97,13 +98,15 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { } // the function should support both short date and full timestamp format // time part of the timestamp should not be skipped - Date date = getTimestampValue(arguments, 0, tsConverters); - if (date == null) { + Timestamp ts = getTimestampValue(arguments, 0, tsConverters); + Date date; + if (ts == null) { date = getDateValue(arguments, 0, dtInputTypes, dtConverters); if (date == null) { return null; } } + date = Date.ofEpochMilli(getTimestampValue(arguments, 0, tsConverters).getMillis()); String res = formatter.format(date); if (res == null) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java index 8691ed15e3..69229bbe7d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java @@ -17,13 +17,11 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Timestamp; -import java.text.ParseException; -import java.text.SimpleDateFormat; import java.util.TimeZone; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.apache.hadoop.hive.common.type.TimestampTZ; +import org.apache.hadoop.hive.common.type.TimestampTZUtil; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -34,6 +32,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TextConverter; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Description(name = "from_utc_timestamp", value = "from_utc_timestamp(timestamp, string timezone) - " @@ -45,7 +45,6 @@ private transient PrimitiveObjectInspector[] argumentOIs; private transient TimestampConverter timestampConverter; private transient TextConverter textConverter; - private transient SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); private transient TimeZone tzUTC = TimeZone.getTimeZone("UTC"); @Override @@ -70,26 +69,6 @@ public ObjectInspector initialize(ObjectInspector[] arguments) return PrimitiveObjectInspectorFactory.javaTimestampObjectInspector; } - /** - * Parse the timestamp string using the input TimeZone. - * This does not parse fractional seconds. 
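The from_utc_timestamp rewrite below replaces the SimpleDateFormat round-trip with TimestampTZUtil.parse plus ZonedDateTime.withZoneSameInstant. A java.time-only sketch of that zone shift (illustrative only, not the patch code):

import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;

public class FromUtcTimestampSketch {
  public static void main(String[] args) {
    // Resolve the wall-clock value in the source zone, then re-express the same instant in the target zone.
    LocalDateTime input = LocalDateTime.of(2009, 7, 30, 12, 58, 59);
    ZonedDateTime inUtc = input.atZone(ZoneId.of("UTC"));
    LocalDateTime inParis = inUtc.withZoneSameInstant(ZoneId.of("Europe/Paris")).toLocalDateTime();
    System.out.println(inParis); // 2009-07-30T14:58:59 (Paris is UTC+2 in July)
  }
}
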
- * @param tsString - * @param tz - * @return - */ - protected Timestamp timestampFromString(String tsString, TimeZone tz) { - dateFormat.setTimeZone(tz); - try { - java.util.Date date = dateFormat.parse(tsString); - if (date == null) { - return null; - } - return new Timestamp(date.getTime()); - } catch (ParseException err) { - return null; - } - } - @Override public Object evaluate(DeferredObject[] arguments) throws HiveException { Object o0 = arguments[0].get(); @@ -123,21 +102,15 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { // inputTs is the year/month/day/hour/minute/second in the local timezone. // For this UDF we want it in the timezone represented by fromTz - Timestamp fromTs = timestampFromString(inputTs.toString(), fromTz); + TimestampTZ fromTs = TimestampTZUtil.parse(inputTs.toString(), fromTz.toZoneId()); if (fromTs == null) { return null; } // Now output this timestamp's millis value to the equivalent toTz. - dateFormat.setTimeZone(toTz); - Timestamp result = Timestamp.valueOf(dateFormat.format(fromTs)); - - if (inputTs.getNanos() != 0) { - result.setNanos(inputTs.getNanos()); - } - + Timestamp result = new Timestamp( + fromTs.getZonedDateTime().withZoneSameInstant(toTz.toZoneId()).toLocalDateTime()); return result; - } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java index d739af94f2..f4c8b013d7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hive.common.io.NonSyncByteArrayInputStream; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -40,7 +41,6 @@ import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; -import java.sql.Timestamp; /** * GenericUDF to lookup a value in BloomFilter @@ -153,7 +153,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { case TIMESTAMP: Timestamp vTimeStamp = ((TimestampObjectInspector) valObjectInspector). getPrimitiveJavaObject(arguments[0].get()); - return bloomFilter.testLong(vTimeStamp.getTime()); + return bloomFilter.testLong(vTimeStamp.getMillis()); case CHAR: Text vChar = ((HiveCharObjectInspector) valObjectInspector). 
getPrimitiveWritableObject(arguments[0].get()).getStrippedValue(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java index 238eff91c6..fae5de04f1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java @@ -20,9 +20,8 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.DATE_GROUP; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.VOID_GROUP; -import java.util.Calendar; -import java.util.Date; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -48,7 +47,7 @@ public class GenericUDFLastDay extends GenericUDF { private transient Converter[] converters = new Converter[1]; private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[1]; - private final Calendar calendar = Calendar.getInstance(); + private final Date date = new Date(); private final Text output = new Text(); @Override @@ -67,14 +66,13 @@ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumen @Override public Object evaluate(DeferredObject[] arguments) throws HiveException { - Date date = getDateValue(arguments, 0, inputTypes, converters); - if (date == null) { + Date d = getDateValue(arguments, 0, inputTypes, converters); + if (d == null) { return null; } - lastDay(date); - Date newDate = calendar.getTime(); - output.set(DateUtils.getDateFormat().format(newDate)); + lastDay(d); + output.set(date.toString()); return output; } @@ -88,10 +86,9 @@ protected String getFuncName() { return "last_day"; } - protected Calendar lastDay(Date d) { - calendar.setTime(d); - int maxDd = calendar.getActualMaximum(Calendar.DAY_OF_MONTH); - calendar.set(Calendar.DAY_OF_MONTH, maxDd); - return calendar; + protected Date lastDay(Date d) { + date.setTimeInDays(d.getDays()); + date.setDayOfMonth(date.getLocalDate().lengthOfMonth()); + return date; } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java index bf2ec823b0..4c94164411 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java @@ -19,8 +19,7 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; - +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector; @@ -218,11 +217,12 @@ Long transform(final Long value) { @Override Date transform(final Date value) { - int year = maskedYearValue == UNMASKED_VAL ? value.getYear() : maskedYearValue; - int month = maskedMonthValue == UNMASKED_VAL ? value.getMonth() : maskedMonthValue; - int day = maskedDayValue == UNMASKED_VAL ? value.getDate() : maskedDayValue; + int actualMonthValue = maskedMonthValue + 1; + int year = maskedYearValue == UNMASKED_VAL ? 
value.getLocalDate().getYear() : maskedYearValue; + int month = maskedMonthValue == UNMASKED_VAL ? value.getLocalDate().getMonthValue() : actualMonthValue; + int day = maskedDayValue == UNMASKED_VAL ? value.getLocalDate().getDayOfMonth() : maskedDayValue; - return new Date(year, month, day); + return Date.of(year, month, day); } protected int transformChar(final int c) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java index 8b1e988b42..a068541d36 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java @@ -18,9 +18,8 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; - import org.apache.commons.codec.digest.DigestUtils; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java index d04e13533e..65d3502d42 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java @@ -18,19 +18,13 @@ package org.apache.hadoop.hive.ql.udf.generic; import static java.math.BigDecimal.ROUND_HALF_UP; -import static java.util.Calendar.DATE; -import static java.util.Calendar.HOUR_OF_DAY; -import static java.util.Calendar.MINUTE; -import static java.util.Calendar.MONTH; -import static java.util.Calendar.SECOND; -import static java.util.Calendar.YEAR; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.DATE_GROUP; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP; import java.math.BigDecimal; -import java.util.Calendar; -import java.util.Date; +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -63,8 +57,8 @@ private transient PrimitiveCategory[] tsInputTypes = new PrimitiveCategory[2]; private transient Converter[] dtConverters = new Converter[2]; private transient PrimitiveCategory[] dtInputTypes = new PrimitiveCategory[2]; - private final Calendar cal1 = Calendar.getInstance(); - private final Calendar cal2 = Calendar.getInstance(); + private final Date cal1 = new Date(); + private final Date cal2 = new Date(); private final DoubleWritable output = new DoubleWritable(); private boolean isRoundOffNeeded = true; @@ -103,37 +97,42 @@ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumen public Object evaluate(DeferredObject[] arguments) throws HiveException { // the function should support both short date and full timestamp format // time part of the timestamp should not be skipped - Date date1 = getTimestampValue(arguments, 0, tsConverters); - if (date1 == null) { + Timestamp ts1 = getTimestampValue(arguments, 0, tsConverters); + Date date1; + if (ts1 == null) { date1 = getDateValue(arguments, 0, dtInputTypes, dtConverters); if (date1 == null) { return null; } } + date1 = 
Date.ofEpochMilli(ts1.getMillis()); - Date date2 = getTimestampValue(arguments, 1, tsConverters); - if (date2 == null) { + Timestamp ts2 = getTimestampValue(arguments, 1, tsConverters); + Date date2; + if (ts2 == null) { date2 = getDateValue(arguments, 1, dtInputTypes, dtConverters); if (date2 == null) { return null; } } + date2 = Date.ofEpochMilli(ts2.getMillis()); - cal1.setTime(date1); - cal2.setTime(date2); + cal1.setTimeInDays(date1.getDays()); + cal2.setTimeInDays(date2.getDays()); // skip day/time part if both dates are end of the month // or the same day of the month - int monDiffInt = (cal1.get(YEAR) - cal2.get(YEAR)) * 12 + (cal1.get(MONTH) - cal2.get(MONTH)); - if (cal1.get(DATE) == cal2.get(DATE) - || (cal1.get(DATE) == cal1.getActualMaximum(DATE) && cal2.get(DATE) == cal2 - .getActualMaximum(DATE))) { + int monDiffInt = (cal1.getLocalDate().getYear() - cal2.getLocalDate().getYear()) * 12 + + (cal1.getLocalDate().getMonthValue() - cal2.getLocalDate().getMonthValue()); + if (cal1.getLocalDate().getDayOfMonth() == cal2.getLocalDate().getDayOfMonth() + || (cal1.getLocalDate().getDayOfMonth() == cal1.getLocalDate().lengthOfMonth() + && cal2.getLocalDate().getDayOfMonth() == cal2.getLocalDate().lengthOfMonth())) { output.set(monDiffInt); return output; } - int sec1 = getDayPartInSec(cal1); - int sec2 = getDayPartInSec(cal2); + long sec1 = cal1.getSeconds(); + long sec2 = cal2.getSeconds(); // 1 sec is 0.000000373 months (1/2678400). 1 month is 31 days. // there should be no adjustments for leap seconds @@ -146,15 +145,6 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { return output; } - protected int getDayPartInSec(Calendar cal) { - int dd = cal.get(DATE); - int HH = cal.get(HOUR_OF_DAY); - int mm = cal.get(MINUTE); - int ss = cal.get(SECOND); - int dayInSec = dd * 86400 + HH * 3600 + mm * 60 + ss; - return dayInSec; - } - @Override public String getDisplayString(String[] children) { return getStandardDisplayString(getFuncName(), children); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java index e74bae3d7d..3d82727594 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java @@ -28,9 +28,7 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.VOID_GROUP; -import java.util.Calendar; -import java.util.Date; - +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -40,7 +38,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; -import org.apache.hive.common.util.DateUtils; + +import java.util.Calendar; /** * GenericUDFNextDay. 
@@ -57,7 +56,7 @@ public class GenericUDFNextDay extends GenericUDF { private transient Converter[] converters = new Converter[2]; private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[2]; - private final Calendar calendar = Calendar.getInstance(); + private final Date date = new Date(); private final Text output = new Text(); private transient int dayOfWeekIntConst; private transient boolean isDayOfWeekConst; @@ -98,14 +97,13 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { return null; } - Date date = getDateValue(arguments, 0, inputTypes, converters); - if (date == null) { + Date d = getDateValue(arguments, 0, inputTypes, converters); + if (d == null) { return null; } - nextDay(date, dayOfWeekInt); - Date newDate = calendar.getTime(); - output.set(DateUtils.getDateFormat().format(newDate)); + nextDay(d, dayOfWeekInt); + output.set(date.toString()); return output; } @@ -119,10 +117,10 @@ protected String getFuncName() { return "next_day"; } - protected Calendar nextDay(Date date, int dayOfWeek) { - calendar.setTime(date); + protected Date nextDay(Date d, int dayOfWeek) { + date.setTimeInDays(d.getDays()); - int currDayOfWeek = calendar.get(Calendar.DAY_OF_WEEK); + int currDayOfWeek = date.getLocalDate().getDayOfWeek().getValue(); int daysToAdd; if (currDayOfWeek < dayOfWeek) { @@ -131,9 +129,9 @@ protected Calendar nextDay(Date date, int dayOfWeek) { daysToAdd = 7 - currDayOfWeek + dayOfWeek; } - calendar.add(Calendar.DATE, daysToAdd); + date.setTimeInDays(date.getDays() + daysToAdd); - return calendar; + return date; } protected int getIntDayOfWeek(String dayOfWeek) throws UDFArgumentException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java index e1673b2c9f..65d70cf507 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java @@ -18,13 +18,13 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.List; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java index a57b373983..ddde3fe1dd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java @@ -18,13 +18,13 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.List; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; 
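Note on the recurring pattern (illustrative sketch, not part of the patch): the UDF hunks above and below all make the same substitution -- java.sql.Date/java.sql.Timestamp constructors become factory methods on org.apache.hadoop.hive.common.type.Date/Timestamp, and Calendar field access becomes LocalDate access. A minimal sketch of that substitution, assuming only methods already used in these hunks (Date.ofEpochMilli, getLocalDate, Timestamp.ofEpochMilli, getMillis, getSeconds) and the Hive common-types jar on the classpath; the class name is hypothetical:

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.Timestamp;

// Hypothetical illustration class, not present in the patch.
public class DateTimeMigrationSketch {
  public static void main(String[] args) {
    // Old: new java.sql.Date(System.currentTimeMillis()); new: epoch-millis factory method.
    Date d = Date.ofEpochMilli(System.currentTimeMillis());

    // Old: Calendar.get(Calendar.MONTH) (0-11) and getActualMaximum(Calendar.DAY_OF_MONTH);
    // new: LocalDate accessors (month is 1-12).
    int month = d.getLocalDate().getMonthValue();
    int lastDayOfMonth = d.getLocalDate().lengthOfMonth();

    // Old: timestamp.getTime() and getTime() / 1000; new: explicit millis/seconds accessors.
    Timestamp ts = Timestamp.ofEpochMilli(System.currentTimeMillis());
    long millis = ts.getMillis();
    long seconds = ts.getSeconds();

    System.out.println(month + " " + lastDayOfMonth + " " + millis + " " + seconds);
  }
}
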
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java index 24068684f3..a538d95b31 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java @@ -21,9 +21,7 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.VOID_GROUP; -import java.util.Calendar; -import java.util.Date; - +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -45,7 +43,6 @@ public class GenericUDFQuarter extends GenericUDF { private transient Converter[] converters = new Converter[1]; private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[1]; - private final Calendar calendar = Calendar.getInstance(); private final IntWritable output = new IntWritable(); @Override @@ -65,8 +62,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { if (date == null) { return null; } - calendar.setTime(date); - int month = calendar.get(Calendar.MONTH); + int month = date.getLocalDate().getMonth().ordinal(); int quarter = (month + 3) / 3; output.set(quarter); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java index f0fcf69856..4ddcef62b0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java @@ -20,9 +20,9 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; -import java.sql.Timestamp; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java index 53dfae2d7f..009afa1ca9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java @@ -18,12 +18,11 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Timestamp; import java.text.ParseException; import java.text.SimpleDateFormat; -import org.apache.calcite.util.TimestampWithTimeZoneString; import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; @@ -41,12 +40,10 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; -import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampLocalTZObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping; import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.Text; /** * deterministic version of UDFUnixTimeStamp. enforces argument @@ -171,7 +168,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { } protected static void setValueFromTs(LongWritable value, Timestamp timestamp) { - value.set(timestamp.getTime() / 1000); + value.set(timestamp.getSeconds()); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java index 372db36f37..39d1bd0f3d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java @@ -20,13 +20,10 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.math.BigDecimal; -import java.sql.Timestamp; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Calendar; -import java.util.Date; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -77,7 +74,6 @@ + " > SELECT _FUNC_(1234567891.1234567891);\n" + "OK\n" + " 1234567891") public class GenericUDFTrunc extends GenericUDF { - private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); private transient TimestampConverter timestampConverter; private transient Converter textConverter1; private transient Converter textConverter2; @@ -88,7 +84,7 @@ private transient Converter longConverter; private transient PrimitiveCategory inputType1; private transient PrimitiveCategory inputType2; - private final Calendar calendar = Calendar.getInstance(); + private final Date date = new Date(); private final Text output = new Text(); private transient String fmtInput; private transient PrimitiveObjectInspector inputOI; @@ -297,36 +293,35 @@ private Object evaluateDate(DeferredObject[] arguments) throws UDFArgumentLength fmtInput = textConverter2.convert(arguments[1].get()).toString(); } - Date date; + Date d; switch (inputType1) { case STRING: String dateString = textConverter1.convert(arguments[0].get()).toString(); try { - date = formatter.parse(dateString.toString()); - } catch (ParseException e) { + d = Date.valueOf(dateString.toString()); + } catch (IllegalArgumentException e) { return null; } break; case TIMESTAMP: Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get())).getTimestamp(); - date = ts; + d = Date.ofEpochMilli(ts.getMillis()); break; case DATE: DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get()); - date = dw.get(); + d = dw.get(); break; default: throw new UDFArgumentTypeException(0, "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1); } - if (evalDate(date) == null) { + if (evalDate(d) == null) { return null; } - Date newDate = calendar.getTime(); - 
output.set(formatter.format(newDate)); + output.set(date.toString()); return output; } @@ -427,22 +422,22 @@ public String getDisplayString(String[] children) { return getStandardDisplayString("trunc", children); } - private Calendar evalDate(Date d) throws UDFArgumentException { - calendar.setTime(d); + private Date evalDate(Date d) throws UDFArgumentException { + date.setTimeInDays(d.getDays()); if ("MONTH".equals(fmtInput) || "MON".equals(fmtInput) || "MM".equals(fmtInput)) { - calendar.set(Calendar.DAY_OF_MONTH, 1); - return calendar; + date.setDayOfMonth(1); + return date; } else if ("QUARTER".equals(fmtInput) || "Q".equals(fmtInput)) { - int month = calendar.get(Calendar.MONTH); + int month = date.getLocalDate().getMonthValue(); int quarter = month / 3; int monthToSet = quarter * 3; - calendar.set(Calendar.MONTH, monthToSet); - calendar.set(Calendar.DAY_OF_MONTH, 1); - return calendar; + date.setMonth(monthToSet); + date.setDayOfMonth(1); + return date; } else if ("YEAR".equals(fmtInput) || "YYYY".equals(fmtInput) || "YY".equals(fmtInput)) { - calendar.set(Calendar.MONTH, 0); - calendar.set(Calendar.DAY_OF_MONTH, 1); - return calendar; + date.setMonth(0); + date.setDayOfMonth(1); + return date; } else { return null; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java index 832983105f..557ab792ea 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java @@ -20,6 +20,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -44,7 +45,7 @@ protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentEx } else { if (currentTimestamp == null) { currentTimestamp = new LongWritable(0); - setValueFromTs(currentTimestamp, SessionState.get().getQueryCurrentTimestamp()); + setValueFromTs(currentTimestamp, Timestamp.ofEpochMilli(SessionState.get().getQueryCurrentTimestamp().getTime())); String msg = "unix_timestamp(void) is deprecated. 
Use current_timestamp instead."; SessionState.getConsole().printInfo(msg, false); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java index b440d8d848..e0d141e11c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java @@ -18,10 +18,9 @@ package org.apache.hadoop.hive.ql.udf.ptf; -import java.sql.Timestamp; -import java.util.Date; - +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.ql.exec.PTFPartition; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -559,7 +558,7 @@ public boolean isDistanceGreater(Object v1, Object v2, int amt) { Date l2 = PrimitiveObjectInspectorUtils.getDate(v2, (PrimitiveObjectInspector) expressionDef.getOI()); if (l1 != null && l2 != null) { - return (double)(l1.getTime() - l2.getTime())/1000 > (long)amt * 24 * 3600; // Converts amt days to milliseconds + return (double)(l1.getMillis() - l2.getMillis())/1000 > (long)amt * 24 * 3600; // Converts amt days to milliseconds } return l1 != l2; // True if only one date is null } @@ -583,9 +582,9 @@ public TimestampValueBoundaryScanner(BoundaryDef start, BoundaryDef end, OrderEx public boolean isDistanceGreater(Object v1, Object v2, int amt) { if (v1 != null && v2 != null) { long l1 = PrimitiveObjectInspectorUtils.getTimestamp(v1, - (PrimitiveObjectInspector) expressionDef.getOI()).getTime(); + (PrimitiveObjectInspector) expressionDef.getOI()).getMillis(); long l2 = PrimitiveObjectInspectorUtils.getTimestamp(v2, - (PrimitiveObjectInspector) expressionDef.getOI()).getTime(); + (PrimitiveObjectInspector) expressionDef.getOI()).getMillis(); return (double)(l1-l2)/1000 > amt; // TODO: lossy conversion, distance is considered in seconds } return v1 != null || v2 != null; // True if only one value is null diff --git a/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java b/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java index 9a097afd56..16681a476c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java @@ -17,17 +17,17 @@ */ package org.apache.hadoop.hive.ql.util; -import java.sql.Date; -import java.sql.Timestamp; -import java.util.Calendar; -import java.util.TimeZone; -import java.util.concurrent.TimeUnit; - -import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hive.common.util.DateUtils; +import java.util.Calendar; +import java.util.TimeZone; +import java.util.concurrent.TimeUnit; + public class DateTimeMath { @@ -102,7 +102,7 @@ public Timestamp add(Timestamp ts, HiveIntervalYearMonth interval) { return null; } - Timestamp tsResult = new Timestamp(0); + Timestamp tsResult = new Timestamp(); add(ts, interval, tsResult); return tsResult; @@ -115,9 +115,8 @@ public boolean add(Timestamp ts, HiveIntervalYearMonth interval, Timestamp resul // Attempt to match Oracle semantics for timestamp arithmetic, // 
where timestamp arithmetic is done in UTC, then converted back to local timezone - long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths()); - result.setTime(resultMillis); - result.setNanos(ts.getNanos()); + long resultMillis = addMonthsToMillisUtc(ts.getMillis(), interval.getTotalMonths()); + result.setTimeInMillis(resultMillis, ts.getNanos()); return true; } @@ -127,7 +126,7 @@ public Timestamp add(HiveIntervalYearMonth interval, Timestamp ts) { return null; } - Timestamp tsResult = new Timestamp(0); + Timestamp tsResult = new Timestamp(); add(interval, ts, tsResult); return tsResult; @@ -140,9 +139,8 @@ public boolean add(HiveIntervalYearMonth interval, Timestamp ts, Timestamp resul // Attempt to match Oracle semantics for timestamp arithmetic, // where timestamp arithmetic is done in UTC, then converted back to local timezone - long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths()); - result.setTime(resultMillis); - result.setNanos(ts.getNanos()); + long resultMillis = addMonthsToMillisUtc(ts.getMillis(), interval.getTotalMonths()); + result.setTimeInMillis(resultMillis, ts.getNanos()); return true; } @@ -152,7 +150,7 @@ public Date add(Date dt, HiveIntervalYearMonth interval) { return null; } - Date dtResult = new Date(0); + Date dtResult = new Date(); add(dt, interval, dtResult); return dtResult; @@ -165,8 +163,8 @@ public boolean add(Date dt, HiveIntervalYearMonth interval, Date result) { // Since Date millis value is in local timezone representation, do date arithmetic // using local timezone so the time remains at the start of the day. - long resultMillis = addMonthsToMillisLocal(dt.getTime(), interval.getTotalMonths()); - result.setTime(resultMillis); + long resultMillis = addMonthsToMillisLocal(dt.getMillis(), interval.getTotalMonths()); + result.setTimeInMillis(resultMillis); return true; } @@ -175,7 +173,7 @@ public Date add(HiveIntervalYearMonth interval, Date dt) { return null; } - Date dtResult = new Date(0); + Date dtResult = new Date(); add(interval, dt, dtResult); return dtResult; @@ -188,8 +186,8 @@ public boolean add(HiveIntervalYearMonth interval, Date dt, Date result) { // Since Date millis value is in local timezone representation, do date arithmetic // using local timezone so the time remains at the start of the day. 
- long resultMillis = addMonthsToMillisLocal(dt.getTime(), interval.getTotalMonths()); - result.setTime(resultMillis); + long resultMillis = addMonthsToMillisLocal(dt.getMillis(), interval.getTotalMonths()); + result.setTimeInMillis(resultMillis); return true; } @@ -208,7 +206,7 @@ public Timestamp subtract(Timestamp left, HiveIntervalYearMonth right) { return null; } - Timestamp tsResult = new Timestamp(0); + Timestamp tsResult = new Timestamp(); subtract(left, right, tsResult); return tsResult; @@ -226,7 +224,7 @@ public Date subtract(Date left, HiveIntervalYearMonth right) { return null; } - Date dtResult = new Date(0); + Date dtResult = new Date(); subtract(left, right, dtResult); return dtResult; @@ -255,7 +253,7 @@ public Timestamp add(Timestamp ts, HiveIntervalDayTime interval) { return null; } - Timestamp tsResult = new Timestamp(0); + Timestamp tsResult = new Timestamp(); add(ts, interval, tsResult); return tsResult; @@ -269,10 +267,9 @@ public boolean add(Timestamp ts, HiveIntervalDayTime interval, nanosResult.addNanos(ts.getNanos(), interval.getNanos()); - long newMillis = ts.getTime() + long newMillis = ts.getMillis() + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds); - result.setTime(newMillis); - result.setNanos(nanosResult.nanos); + result.setTimeInMillis(newMillis, nanosResult.nanos); return true; } @@ -281,7 +278,7 @@ public Timestamp add(HiveIntervalDayTime interval, Timestamp ts) { return null; } - Timestamp tsResult = new Timestamp(0); + Timestamp tsResult = new Timestamp(); add(interval, ts, tsResult); return tsResult; } @@ -294,10 +291,9 @@ public boolean add(HiveIntervalDayTime interval, Timestamp ts, nanosResult.addNanos(ts.getNanos(), interval.getNanos()); - long newMillis = ts.getTime() + long newMillis = ts.getMillis() + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds); - result.setTime(newMillis); - result.setNanos(nanosResult.nanos); + result.setTimeInMillis(newMillis, nanosResult.nanos); return true; } @@ -373,8 +369,8 @@ public boolean subtract(Timestamp left, Timestamp right, nanosResult.addNanos(left.getNanos(), -(right.getNanos())); - long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.getTime()) - - TimeUnit.MILLISECONDS.toSeconds(right.getTime()) + nanosResult.seconds; + long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.getMillis()) + - TimeUnit.MILLISECONDS.toSeconds(right.getMillis()) + nanosResult.seconds; result.set(totalSeconds, nanosResult.nanos); return true; } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java index c8ae73a21d..f29fede1d1 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java @@ -19,12 +19,12 @@ import static org.junit.Assert.assertEquals; import java.io.IOException; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDeException; @@ -55,7 +55,7 @@ public void testSpillTimestamp() throws HiveException, SerDeException, IOExcepti ObjectInspectorUtils.getStandardObjectInspector(serde.getObjectInspector())); result.setTableDesc( 
PTFRowContainer.createTableDesc((StructObjectInspector) serde.getObjectInspector())); - TimestampWritable key = new TimestampWritable(new Timestamp(10)); + TimestampWritable key = new TimestampWritable(Timestamp.ofEpochMilli(10)); result.setKeyObject(Lists.newArrayList(key)); List row; // will trigger 2 spills diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java index f163289f51..27e834dbe2 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java @@ -20,10 +20,10 @@ import org.junit.Test; -import java.sql.Timestamp; import java.util.Random; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.util.TimestampUtils; import static org.junit.Assert.*; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorHashKeyWrapperBatch.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorHashKeyWrapperBatch.java index e349fbd384..4bade176d1 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorHashKeyWrapperBatch.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorHashKeyWrapperBatch.java @@ -18,20 +18,17 @@ package org.apache.hadoop.hive.ql.exec.vector; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.sql.Timestamp; - -import org.junit.Test; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.expressions.IdentityExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromObjectIterables; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; /** * Unit test for VectorHashKeyWrapperBatch class. @@ -61,13 +58,16 @@ public void testVectorHashKeyWrapperBatch() throws HiveException { // Cause Timestamp object to be replaced (in buggy code) with ZERO_TIMESTAMP. 
timestampColVector.noNulls = false; timestampColVector.isNull[0] = true; - Timestamp scratch = new Timestamp(2039); - Timestamp ts0 = new Timestamp(2039); - scratch.setTime(ts0.getTime()); + Timestamp scratch = new Timestamp(); + scratch.setTimeInMillis(2039); + Timestamp ts0 = new Timestamp(); + ts0.setTimeInMillis(2039); + scratch.setTimeInMillis(ts0.getMillis()); scratch.setNanos(ts0.getNanos()); timestampColVector.set(1, scratch); - Timestamp ts1 = new Timestamp(33222); - scratch.setTime(ts1.getTime()); + Timestamp ts1 = new Timestamp(); + ts1.setTimeInMillis(33222); + scratch.setTimeInMillis(ts1.getMillis()); scratch.setNanos(ts1.getNanos()); timestampColVector.set(2, scratch); batch.size = 3; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java index 3f993284f1..42f07c7e0c 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java @@ -18,28 +18,28 @@ package org.apache.hadoop.hive.ql.exec.vector; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Random; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; -import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StandardMapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; @@ -74,12 +74,12 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; -import org.apache.hive.common.util.DateUtils; -import org.apache.hadoop.io.Text; import org.apache.hadoop.io.BytesWritable; +import org.apache.hadoop.io.Text; +import org.apache.hive.common.util.DateUtils; -import com.google.common.base.Preconditions; import com.google.common.base.Charsets; +import com.google.common.base.Preconditions; /** * Generate object inspector and random row object[]. 
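Illustrative sketch (not part of the patch) of how the test hunks above construct the new mutable Timestamp in place of new java.sql.Timestamp(millis): the no-argument constructor plus setTimeInMillis for scratch values, or the Timestamp.ofEpochMilli factory for a fixed instant. It assumes only methods already shown in these hunks; the class name is hypothetical:

import org.apache.hadoop.hive.common.type.Timestamp;

// Hypothetical illustration class, not present in the patch.
public class TimestampTestSketch {
  public static void main(String[] args) {
    // Old: Timestamp scratch = new Timestamp(2039);
    Timestamp scratch = new Timestamp();   // replaces new Timestamp(0)
    scratch.setTimeInMillis(2039);

    // Old: new TimestampWritable(new Timestamp(10)) becomes
    //      new TimestampWritable(Timestamp.ofEpochMilli(10)) in the hunks above.
    Timestamp fixed = Timestamp.ofEpochMilli(10);

    System.out.println(scratch + " " + fixed + " nanos=" + fixed.getNanos());
  }
}
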
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java index ec5ad2327d..df309c847e 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java @@ -19,11 +19,13 @@ package org.apache.hadoop.hive.ql.exec.vector; import junit.framework.TestCase; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.fast.DeserializeRead; import org.apache.hadoop.hive.serde2.fast.SerializeWrite; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -56,8 +58,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java index 9db91beea6..0a3652f3c9 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java @@ -18,15 +18,22 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.junit.Assert; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.List; +import java.util.Random; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + import org.apache.commons.lang.ArrayUtils; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth; -import org.apache.hadoop.hive.ql.udf.UDFMonth; import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear; import org.apache.hadoop.hive.ql.udf.UDFYear; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -36,30 +43,19 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import org.junit.internal.runners.statements.Fail; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.Calendar; -import java.util.List; -import java.util.Random; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.ThreadFactory; - public class TestVectorDateExpressions { private ExecutorService runner; /* copied over from VectorUDFTimestampFieldLong */ private TimestampWritable toTimestampWritable(long 
daysSinceEpoch) { - Timestamp ts = new Timestamp(DateWritable.daysToMillis((int) daysSinceEpoch)); + Timestamp ts = Timestamp.ofEpochMilli(DateWritable.daysToMillis((int) daysSinceEpoch)); return new TimestampWritable(ts); } @@ -111,8 +107,8 @@ private VectorizedRowBatch getVectorizedRowBatch(int[] inputs, int size) { private void compareToUDFYearDate(long t, int y) { UDFYear udf = new UDFYear(); TimestampWritable tsw = toTimestampWritable(t); - IntWritable res = udf.evaluate(tsw); - Assert.assertEquals(res.get(), y); + int res = tsw.getTimestamp().getLocalDateTime().getYear(); + Assert.assertEquals(res, y); } private void verifyUDFYear(VectorizedRowBatch batch) { @@ -171,10 +167,9 @@ public void testVectorUDFYear() { } private void compareToUDFDayOfMonthDate(long t, int y) { - UDFDayOfMonth udf = new UDFDayOfMonth(); TimestampWritable tsw = toTimestampWritable(t); - IntWritable res = udf.evaluate(tsw); - Assert.assertEquals(res.get(), y); + int res = tsw.getTimestamp().getLocalDateTime().getDayOfMonth(); + Assert.assertEquals(res, y); } private void verifyUDFDayOfMonth(VectorizedRowBatch batch) { @@ -233,10 +228,9 @@ public void testVectorUDFDayOfMonth() { } private void compareToUDFMonthDate(long t, int y) { - UDFMonth udf = new UDFMonth(); TimestampWritable tsw = toTimestampWritable(t); - IntWritable res = udf.evaluate(tsw); - Assert.assertEquals(res.get(), y); + int res = tsw.getTimestamp().getLocalDateTime().getMonthValue(); + Assert.assertEquals(res, y); } private void verifyUDFMonth(VectorizedRowBatch batch) { @@ -309,7 +303,7 @@ private void compareToUDFUnixTimeStampDate(long t, long y) { LongWritable res = getLongWritable(tsw); if(res.get() != y) { System.out.printf("%d vs %d for %d, %d\n", res.get(), y, t, - tsw.getTimestamp().getTime()/1000); + tsw.getTimestamp().getSeconds()); } Assert.assertEquals(res.get(), y); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java index fe3c91cab4..24323cb9f2 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Random; -import junit.framework.Assert; - import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; @@ -58,6 +56,8 @@ import org.apache.hadoop.io.Writable; import org.junit.Test; +import junit.framework.Assert; + /** * Unit tests for vector expression writers. */ @@ -115,7 +115,7 @@ private Writable getWritableValue(TypeInfo ti, long value) { } else if (ti.equals(TypeInfoFactory.booleanTypeInfo)) { return new BooleanWritable( value == 0 ? 
false : true); } else if (ti.equals(TypeInfoFactory.timestampTypeInfo)) { - Timestamp ts = new Timestamp(value); + Timestamp ts = Timestamp.ofEpochMilli(value); TimestampWritable tw = new TimestampWritable(ts); return tw; } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java index 3e769bb780..5fae8a7330 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java @@ -22,10 +22,9 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import java.sql.Timestamp; - import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; @@ -595,7 +594,7 @@ public void testFilterTimestampBetween() { vrb.cols[0] = new TimestampColumnVector(); TimestampColumnVector lcv0 = (TimestampColumnVector) vrb.cols[0]; - Timestamp startTS = new Timestamp(0); // the epoch + Timestamp startTS = new Timestamp(); // the epoch Timestamp endTS = Timestamp.valueOf("2013-11-05 00:00:00.000000000"); Timestamp ts0 = Timestamp.valueOf("1963-11-06 00:00:00.000"); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java index 9792951f7e..e875194a8f 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -35,7 +36,6 @@ import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.sql.Date; -import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.List; @@ -88,7 +88,7 @@ private TimestampColumnVector toTimestamp(LongColumnVector date) { } private Timestamp toTimestamp(long date) { - return new Timestamp(DateWritable.daysToMillis((int) date)); + return Timestamp.ofEpochMilli(DateWritable.daysToMillis((int) date)); } private BytesColumnVector toString(LongColumnVector date) { @@ -479,7 +479,7 @@ public void testDateDiffScalarCol() throws HiveException { byte[] bytes = "error".getBytes(utf8); VectorizedRowBatch batch = new VectorizedRowBatch(2, 1); - udf = new VectorUDFDateDiffScalarCol(new Timestamp(0), 0, 1); + udf = new VectorUDFDateDiffScalarCol(new Timestamp(), 0, 1); udf.setInputTypeInfos(new TypeInfo[] {TypeInfoFactory.timestampTypeInfo, TypeInfoFactory.stringTypeInfo}); udf.transientInit(); batch.cols[0] = new BytesColumnVector(1); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java 
b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java index a8f94e5002..c1585d77af 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java @@ -20,13 +20,11 @@ import java.io.UnsupportedEncodingException; import java.nio.charset.StandardCharsets; -import java.sql.Timestamp; import java.util.Arrays; import java.util.Random; -import junit.framework.Assert; - import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -60,6 +58,8 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.junit.Test; +import junit.framework.Assert; + public class TestVectorMathFunctions { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java index f6dbd672e9..eadcf9eeb9 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java @@ -20,19 +20,15 @@ import java.nio.ByteBuffer; import java.nio.charset.CharacterCodingException; -import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Date; -import java.util.List; import java.util.Random; -import junit.framework.Assert; - -import org.apache.commons.lang.ArrayUtils; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -51,10 +47,11 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.junit.Test; +import junit.framework.Assert; + /** * Unit tests for timestamp expressions. 
*/ @@ -76,11 +73,11 @@ long before = exactly - 1000; long after = exactly + 1000; if (minYear != 0) { - boundaries.add(new Timestamp(before)); + boundaries.add(Timestamp.ofEpochMilli(before)); } - boundaries.add(new Timestamp(exactly)); + boundaries.add(Timestamp.ofEpochMilli(exactly)); if (year != maxYear) { - boundaries.add(new Timestamp(after)); + boundaries.add(Timestamp.ofEpochMilli(after)); } } return boundaries.toArray(new Timestamp[0]); @@ -191,7 +188,7 @@ private VectorizedRowBatch getVectorizedRowBatch(Timestamp[] inputs, int size, T private byte[] encodeTime(Timestamp timestamp) { ByteBuffer encoded; - long time = timestamp.getTime(); + long time = timestamp.getMillis(); try { String formatted = dateFormat.format(new Date(time)); encoded = Text.encode(formatted); @@ -203,7 +200,7 @@ private VectorizedRowBatch getVectorizedRowBatch(Timestamp[] inputs, int size, T private Timestamp decodeTime(byte[] time) { try { - return new Timestamp(dateFormat.parse(Text.decode(time)).getTime()); + return Timestamp.ofEpochMilli(dateFormat.parse(Text.decode(time)).getTime()); } catch (Exception e) { throw new RuntimeException(e); } @@ -227,12 +224,12 @@ private Timestamp readVectorElementAt(ColumnVector col, int i) { private void compareToUDFYearLong(Timestamp t, int y) { UDFYear udf = new UDFYear(); TimestampWritable tsw = new TimestampWritable(t); - IntWritable res = udf.evaluate(tsw); - if (res.get() != y) { - System.out.printf("%d vs %d for %s, %d\n", res.get(), y, t.toString(), - tsw.getTimestamp().getTime()/1000); + int res = tsw.getTimestamp().getLocalDateTime().getYear(); + if (res != y) { + System.out.printf("%d vs %d for %s, %d\n", res, y, t.toString(), + tsw.getTimestamp().getSeconds()); } - Assert.assertEquals(res.get(), y); + Assert.assertEquals(res, y); } private void verifyUDFYear(VectorizedRowBatch batch, TestType testType) @@ -265,7 +262,7 @@ private void verifyUDFYear(VectorizedRowBatch batch, TestType testType) } private void testVectorUDFYear(TestType testType) throws HiveException { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -281,14 +278,14 @@ private void testVectorUDFYear(TestType testType) throws HiveException { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFYear(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFYear(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFYear(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; @@ -323,10 +320,9 @@ public void testVectorUDFYearString() throws HiveException { } private void compareToUDFDayOfMonthLong(Timestamp t, int y) { - UDFDayOfMonth udf = new UDFDayOfMonth(); TimestampWritable tsw = new TimestampWritable(t); - IntWritable res = udf.evaluate(tsw); - Assert.assertEquals(res.get(), y); + int res = tsw.getTimestamp().getLocalDateTime().getDayOfMonth(); + Assert.assertEquals(res, 
y); } private void verifyUDFDayOfMonth(VectorizedRowBatch batch, TestType testType) @@ -359,7 +355,7 @@ private void verifyUDFDayOfMonth(VectorizedRowBatch batch, TestType testType) } private void testVectorUDFDayOfMonth(TestType testType) throws HiveException { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -375,14 +371,14 @@ private void testVectorUDFDayOfMonth(TestType testType) throws HiveException { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFDayOfMonth(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFDayOfMonth(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFDayOfMonth(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; @@ -410,10 +406,9 @@ public void testVectorUDFDayOfMonthString() throws HiveException { } private void compareToUDFHourLong(Timestamp t, int y) { - UDFHour udf = new UDFHour(); TimestampWritable tsw = new TimestampWritable(t); - IntWritable res = udf.evaluate(tsw); - Assert.assertEquals(res.get(), y); + int res = tsw.getTimestamp().getLocalDateTime().getHour(); + Assert.assertEquals(res, y); } private void verifyUDFHour(VectorizedRowBatch batch, TestType testType) throws HiveException { @@ -445,7 +440,7 @@ private void verifyUDFHour(VectorizedRowBatch batch, TestType testType) throws H } private void testVectorUDFHour(TestType testType) throws HiveException { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -461,14 +456,14 @@ private void testVectorUDFHour(TestType testType) throws HiveException { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFHour(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFHour(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFHour(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; @@ -496,10 +491,9 @@ public void testVectorUDFHourString() throws HiveException { } private void compareToUDFMinuteLong(Timestamp t, int y) { - UDFMinute udf = new UDFMinute(); TimestampWritable tsw = new TimestampWritable(t); - IntWritable res = udf.evaluate(tsw); - Assert.assertEquals(res.get(), y); + int res = tsw.getTimestamp().getLocalDateTime().getMinute(); + 
Assert.assertEquals(res, y); } private void verifyUDFMinute(VectorizedRowBatch batch, TestType testType) @@ -532,7 +526,7 @@ private void verifyUDFMinute(VectorizedRowBatch batch, TestType testType) } private void testVectorUDFMinute(TestType testType) throws HiveException { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -548,14 +542,14 @@ private void testVectorUDFMinute(TestType testType) throws HiveException { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFMinute(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFMinute(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFMinute(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; @@ -583,10 +577,9 @@ public void testVectorUDFMinuteString() throws HiveException { } private void compareToUDFMonthLong(Timestamp t, int y) { - UDFMonth udf = new UDFMonth(); TimestampWritable tsw = new TimestampWritable(t); - IntWritable res = udf.evaluate(tsw); - Assert.assertEquals(res.get(), y); + int res = tsw.getTimestamp().getLocalDateTime().getMonthValue(); + Assert.assertEquals(res, y); } private void verifyUDFMonth(VectorizedRowBatch batch, TestType testType) throws HiveException { @@ -618,7 +611,7 @@ private void verifyUDFMonth(VectorizedRowBatch batch, TestType testType) throws } private void testVectorUDFMonth(TestType testType) throws HiveException { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -634,14 +627,14 @@ private void testVectorUDFMonth(TestType testType) throws HiveException { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFMonth(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFMonth(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFMonth(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; @@ -669,10 +662,9 @@ public void testVectorUDFMonthString() throws HiveException { } private void compareToUDFSecondLong(Timestamp t, int y) { - UDFSecond udf = new UDFSecond(); TimestampWritable tsw = new TimestampWritable(t); - IntWritable res = udf.evaluate(tsw); - Assert.assertEquals(res.get(), y); + int res = tsw.getTimestamp().getLocalDateTime().getSecond(); + 
Assert.assertEquals(res, y); } private void verifyUDFSecond(VectorizedRowBatch batch, TestType testType) throws HiveException { @@ -704,7 +696,7 @@ private void verifyUDFSecond(VectorizedRowBatch batch, TestType testType) throws } private void testVectorUDFSecond(TestType testType) throws HiveException { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -720,14 +712,14 @@ private void testVectorUDFSecond(TestType testType) throws HiveException { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFSecond(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFSecond(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFSecond(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; @@ -755,7 +747,7 @@ public void testVectorUDFSecondString() throws HiveException { } private void compareToUDFUnixTimeStampLong(Timestamp ts, long y) { - long seconds = ts.getTime() / 1000; + long seconds = ts.getSeconds(); if(seconds != y) { System.out.printf("%d vs %d for %s\n", seconds, y, ts.toString()); Assert.assertTrue(false); @@ -792,7 +784,7 @@ private void verifyUDFUnixTimeStamp(VectorizedRowBatch batch, TestType testType) } private void testVectorUDFUnixTimeStamp(TestType testType) throws HiveException { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -808,14 +800,14 @@ private void testVectorUDFUnixTimeStamp(TestType testType) throws HiveException TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFUnixTimeStamp(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFUnixTimeStamp(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFUnixTimeStamp(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; @@ -876,7 +868,7 @@ private void verifyUDFWeekOfYear(VectorizedRowBatch batch, TestType testType) } private void testVectorUDFWeekOfYear(TestType testType) throws HiveException { - VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, + VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, VectorizedRowBatch.DEFAULT_SIZE, testType); Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls); 
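Editor's aside: the compareToUDF*Long helpers above all change in the same way, so a minimal standalone sketch of the new pattern may help. It assumes only the Hive Timestamp API visible in this patch (ofEpochMilli, getLocalDateTime, getSeconds); the class name and the instant (the running-example value from the UDFDateFloor test later in this diff) are illustrative, and no specific printed values are asserted.

    import org.apache.hadoop.hive.common.type.Timestamp;
    import org.apache.hadoop.hive.serde2.io.TimestampWritable;

    public class TimestampFieldSketch {
      public static void main(String[] args) {
        // Build the Hive Timestamp from epoch millis instead of new java.sql.Timestamp(millis).
        Timestamp ts = Timestamp.ofEpochMilli(494243222000L);
        TimestampWritable tsw = new TimestampWritable(ts);

        // The old tests went through UDFHour/UDFMinute/UDFSecond and IntWritable results;
        // the rewritten assertions read the calendar fields straight off the value.
        int hour   = tsw.getTimestamp().getLocalDateTime().getHour();
        int minute = tsw.getTimestamp().getLocalDateTime().getMinute();
        int second = tsw.getTimestamp().getLocalDateTime().getSecond();

        // Epoch seconds replace the former ts.getTime() / 1000 arithmetic.
        long epochSeconds = ts.getSeconds();

        System.out.printf("%02d:%02d:%02d (epoch %d)%n", hour, minute, second, epochSeconds);
      }
    }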
Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating); @@ -892,14 +884,14 @@ private void testVectorUDFWeekOfYear(TestType testType) throws HiveException { TestVectorizedRowBatch.addRandomNulls(batch.cols[1]); verifyUDFWeekOfYear(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; verifyUDFWeekOfYear(batch, testType); batch.cols[0].noNulls = false; batch.cols[0].isNull[0] = true; verifyUDFWeekOfYear(batch, testType); - batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType); + batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp()}, 1, testType); batch.cols[0].isRepeating = true; batch.selectedInUse = true; batch.selected = new int[] {42}; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java index 6aa6da93ca..f50569ae95 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java @@ -22,19 +22,13 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import java.io.UnsupportedEncodingException; import java.math.BigDecimal; -import java.math.MathContext; -import java.math.RoundingMode; -import java.sql.Timestamp; -import java.util.Arrays; import java.util.Random; import java.util.concurrent.TimeUnit; -import junit.framework.Assert; - import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; @@ -50,8 +44,11 @@ import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import org.apache.hadoop.hive.ql.util.TimestampUtils; import org.junit.Test; +import junit.framework.Assert; + /** * Test VectorExpression classes for vectorized implementations of type casts. 
*/ @@ -87,7 +84,7 @@ public void testCastDateToTimestamp() { expr.evaluate(b); for (int i = 0; i < intValues.length; i++) { Timestamp timestamp = resultV.asScratchTimestamp(i); - long actual = DateWritable.millisToDays(timestamp.getTime()); + long actual = DateWritable.millisToDays(timestamp.getMillis()); assertEquals(actual, intValues[i]); } } @@ -463,9 +460,8 @@ private VectorizedRowBatch getBatchDecimalTimestamp(double[] doubleValues) { Random r = new Random(94830); for (int i = 0; i < doubleValues.length; i++) { long millis = RandomTypeUtil.randomMillis(r); - Timestamp ts = new Timestamp(millis); int nanos = RandomTypeUtil.randomNanos(r); - ts.setNanos(nanos); + Timestamp ts = Timestamp.ofEpochMilli(millis, nanos); TimestampWritable tsw = new TimestampWritable(ts); double asDouble = tsw.getDouble(); doubleValues[i] = asDouble; @@ -528,8 +524,7 @@ private VectorizedRowBatch getBatchTimestampDecimal(HiveDecimal[] hiveDecimalVal break; } long millis = RandomTypeUtil.randomMillis(r); - Timestamp ts = new Timestamp(millis); - ts.setNanos(optionalNanos); + Timestamp ts = Timestamp.ofEpochMilli(millis, optionalNanos); TimestampWritable tsw = new TimestampWritable(ts); hiveDecimalValues[i] = tsw.getHiveDecimal(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java index 441242563b..924ac8d057 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java @@ -19,21 +19,19 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; -import junit.framework.TestCase; - +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.fast.DeserializeRead; import org.apache.hadoop.hive.serde2.fast.SerializeWrite; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -63,7 +61,8 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; -import org.apache.hadoop.io.Writable; + +import junit.framework.TestCase; /** * (Copy of VerifyFast from serde). 
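Editor's aside: the TestVectorTypeCasts hunks above show the construction change in its smallest form, so here is a short sketch of it in isolation. The two-step java.sql pattern (new Timestamp(millis) followed by setNanos(nanos)) collapses into a single factory call on the Hive type, and epoch millis are read back with getMillis() rather than getTime(). The RandomTypeUtil and DateWritable calls mirror the test code in the hunk; the wrapping main method is illustrative only.

    import java.util.Random;
    import org.apache.hadoop.hive.common.type.RandomTypeUtil;
    import org.apache.hadoop.hive.common.type.Timestamp;
    import org.apache.hadoop.hive.serde2.io.DateWritable;

    public class TimestampFactorySketch {
      public static void main(String[] args) {
        Random r = new Random(94830);

        long millis = RandomTypeUtil.randomMillis(r);
        int nanos = RandomTypeUtil.randomNanos(r);

        // Millis and nanos are supplied together; there is no mutable setNanos step.
        Timestamp ts = Timestamp.ofEpochMilli(millis, nanos);

        // Day conversion now starts from getMillis() instead of java.sql's getTime().
        long days = DateWritable.millisToDays(ts.getMillis());

        System.out.println(ts + " -> " + days + " days since epoch");
      }
    }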
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java index d90093f050..7ab420acf4 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java @@ -18,15 +18,14 @@ package org.apache.hadoop.hive.ql.exec.vector.util; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import java.util.regex.MatchResult; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java index 3fe8b09ffa..5345b9845b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hive.ql.exec.vector.util; -import java.sql.Timestamp; import java.util.Random; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java index 1064b1986a..138a98b41a 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java @@ -18,21 +18,19 @@ package org.apache.hadoop.hive.ql.exec.vector.util.batchgen; -import java.sql.Timestamp; import java.util.Arrays; import java.util.Random; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType; import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType.GenerateCategory; -import org.apache.hadoop.io.BooleanWritable; -import org.apache.hadoop.io.Text; public class VectorColumnGroupGenerator { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java 
b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java index ef678a8eb3..364ac14b0b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java @@ -28,8 +28,6 @@ import java.math.BigDecimal; import java.math.BigInteger; import java.nio.ByteBuffer; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -43,7 +41,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf; import org.apache.hadoop.hive.ql.io.sarg.SearchArgument; import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory; @@ -1315,7 +1315,7 @@ public void createOrcDateFile(Path file, int minYear, int maxYear new TimestampWritable(Timestamp.valueOf(year + "-05-05 12:34:56." + ms))); row.setFieldValue(1, - new DateWritable(new Date(year - 1900, 11, 25))); + new DateWritable(Date.of(year - 1900, 11, 25))); writer.addRow(row); } } @@ -1329,7 +1329,7 @@ public void createOrcDateFile(Path file, int minYear, int maxYear assertEquals(new TimestampWritable (Timestamp.valueOf(year + "-05-05 12:34:56." + ms)), row.getFieldValue(0)); - assertEquals(new DateWritable(new Date(year - 1900, 11, 25)), + assertEquals(new DateWritable(Date.of(year - 1900, 11, 25)), row.getFieldValue(1)); } } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java index 0c9c95d534..d9b03f4e4a 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java @@ -19,9 +19,7 @@ package org.apache.hadoop.hive.ql.io.orc; import java.io.File; -import java.sql.Date; -import java.sql.Timestamp; -import java.util.Calendar; +import java.time.LocalDateTime; import java.util.Random; import junit.framework.Assert; @@ -29,7 +27,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; @@ -133,7 +133,7 @@ public void createFile() throws Exception { for (int i = 0; i < 21000; ++i) { if ((i % 7) != 0) { writer.addRow(new MyRecord(((i % 3) == 0), (byte)(i % 5), i, (long) 200, (short) (300 + i), (double) (400 + i), - words[r1.nextInt(words.length)], new Timestamp(Calendar.getInstance().getTime().getTime()), + words[r1.nextInt(words.length)], new Timestamp(LocalDateTime.now()), Date.valueOf(dates[i % 3]), HiveDecimal.create(decimalStrings[i % decimalStrings.length]))); } else { writer.addRow(new MyRecord(null, null, i, (long) 200, null, null, null, null, null, null)); @@ -184,7 +184,7 @@ private void checkVectorizedReader() throws Exception { DateWritable adt = (DateWritable) a; long b = ((LongColumnVector) cv).vector[rowId]; - Assert.assertEquals(adt.get().getTime(), + Assert.assertEquals(adt.get().getMillis(), 
DateWritable.daysToMillis((int) b)); } else if (a instanceof HiveDecimalWritable) { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java index a2304410b6..15927b50b5 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java @@ -23,6 +23,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; @@ -62,7 +63,6 @@ import org.apache.parquet.schema.MessageType; import java.io.IOException; -import java.sql.Timestamp; import java.util.Arrays; import java.util.List; @@ -209,7 +209,9 @@ protected static boolean getBooleanValue( } protected static NanoTime getNanoTime(int index) { - return NanoTimeUtils.getNanoTime(new Timestamp(index), false); + Timestamp ts = new Timestamp(); + ts.setTimeInMillis(index); + return NanoTimeUtils.getNanoTime(ts, false); } protected static HiveDecimal getDecimal( @@ -376,8 +378,13 @@ protected void timestampRead(boolean isDictionaryEncoding) throws InterruptedExc if (c == nElements) { break; } - Timestamp expected = isDictionaryEncoding ? new Timestamp(c % UNIQUE_NUM) : new Timestamp(c); - assertEquals("Not the same time at " + c, expected.getTime(), vector.getTime(i)); + Timestamp expected = new Timestamp(); + if (isDictionaryEncoding) { + expected.setTimeInMillis(c % UNIQUE_NUM); + } else { + expected.setTimeInMillis(c); + } + assertEquals("Not the same time at " + c, expected.getMillis(), vector.getTime(i)); assertEquals("Not the same nano at " + c, expected.getNanos(), vector.getNanos(i)); assertFalse(vector.isNull[i]); c++; @@ -408,8 +415,12 @@ protected void stringReadTimestamp(boolean isDictionaryEncoding) throws Interrup break; } - Timestamp expected = isDictionaryEncoding ? 
new Timestamp(c % UNIQUE_NUM) : new Timestamp( - c); + Timestamp expected = new Timestamp(); + if (isDictionaryEncoding) { + expected.setTimeInMillis(c % UNIQUE_NUM); + } else { + expected.setTimeInMillis(c); + }; String actual = new String(Arrays .copyOfRange(vector.vector[i], vector.start[i], vector.start[i] + vector.length[i])); assertEquals("Not the same time at " + c, expected.toString(), actual); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java index d14f0a99ac..0d0057e080 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java @@ -13,19 +13,18 @@ */ package org.apache.hadoop.hive.ql.io.parquet.serde; -import java.sql.Timestamp; import java.util.Calendar; -import java.util.Date; import java.util.GregorianCalendar; import java.util.TimeZone; import java.util.concurrent.TimeUnit; -import junit.framework.Assert; -import junit.framework.TestCase; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime; import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils; +import junit.framework.Assert; +import junit.framework.TestCase; + /** @@ -42,7 +41,7 @@ public void testJulianDay() { cal.set(Calendar.HOUR_OF_DAY, 0); cal.setTimeZone(TimeZone.getTimeZone("GMT")); - Timestamp ts = new Timestamp(cal.getTimeInMillis()); + Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis()); NanoTime nt = NanoTimeUtils.getNanoTime(ts, false); Assert.assertEquals(nt.getJulianDay(), 2440000); @@ -57,7 +56,7 @@ public void testJulianDay() { cal1.set(Calendar.HOUR_OF_DAY, 0); cal1.setTimeZone(TimeZone.getTimeZone("GMT")); - Timestamp ts1 = new Timestamp(cal1.getTimeInMillis()); + Timestamp ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis()); NanoTime nt1 = NanoTimeUtils.getNanoTime(ts1, false); Timestamp ts1Fetched = NanoTimeUtils.getTimestamp(nt1, false); @@ -70,7 +69,7 @@ public void testJulianDay() { cal2.set(Calendar.HOUR_OF_DAY, 0); cal2.setTimeZone(TimeZone.getTimeZone("UTC")); - Timestamp ts2 = new Timestamp(cal2.getTimeInMillis()); + Timestamp ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis()); NanoTime nt2 = NanoTimeUtils.getNanoTime(ts2, false); Timestamp ts2Fetched = NanoTimeUtils.getTimestamp(nt2, false); @@ -86,7 +85,7 @@ public void testJulianDay() { cal1.set(Calendar.HOUR_OF_DAY, 0); cal1.setTimeZone(TimeZone.getTimeZone("GMT")); - ts1 = new Timestamp(cal1.getTimeInMillis()); + ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis()); nt1 = NanoTimeUtils.getNanoTime(ts1, false); ts1Fetched = NanoTimeUtils.getTimestamp(nt1, false); @@ -99,7 +98,7 @@ public void testJulianDay() { cal2.set(Calendar.HOUR_OF_DAY, 0); cal2.setTimeZone(TimeZone.getTimeZone("UTC")); - ts2 = new Timestamp(cal2.getTimeInMillis()); + ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis()); nt2 = NanoTimeUtils.getNanoTime(ts2, false); ts2Fetched = NanoTimeUtils.getTimestamp(nt2, false); @@ -117,8 +116,7 @@ public void testNanos() { cal.set(Calendar.MINUTE, 1); cal.set(Calendar.SECOND, 1); cal.setTimeZone(TimeZone.getTimeZone("GMT")); - Timestamp ts = new Timestamp(cal.getTimeInMillis()); - ts.setNanos(1); + Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 1); //(1*60*60 + 1*60 + 1) * 10e9 + 1 NanoTime nt = NanoTimeUtils.getNanoTime(ts, false); @@ -133,8 +131,7 @@ public void 
testNanos() { cal.set(Calendar.MINUTE, 59); cal.set(Calendar.SECOND, 59); cal.setTimeZone(TimeZone.getTimeZone("GMT")); - ts = new Timestamp(cal.getTimeInMillis()); - ts.setNanos(999999999); + ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 999999999); //(23*60*60 + 59*60 + 59)*10e9 + 999999999 nt = NanoTimeUtils.getNanoTime(ts, false); @@ -149,8 +146,7 @@ public void testNanos() { cal2.set(Calendar.MINUTE, 10); cal2.set(Calendar.SECOND, 0); cal2.setTimeZone(TimeZone.getTimeZone("GMT")); - Timestamp ts2 = new Timestamp(cal2.getTimeInMillis()); - ts2.setNanos(10); + Timestamp ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis(), 10); Calendar cal1 = Calendar.getInstance(); cal1.set(Calendar.YEAR, 1968); @@ -160,8 +156,7 @@ public void testNanos() { cal1.set(Calendar.MINUTE, 0); cal1.set(Calendar.SECOND, 0); cal1.setTimeZone(TimeZone.getTimeZone("GMT")); - Timestamp ts1 = new Timestamp(cal1.getTimeInMillis()); - ts1.setNanos(1); + Timestamp ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis(), 1); NanoTime n2 = NanoTimeUtils.getNanoTime(ts2, false); NanoTime n1 = NanoTimeUtils.getNanoTime(ts1, false); @@ -183,8 +178,7 @@ public void testTimezone() { cal.set(Calendar.MINUTE, 1); cal.set(Calendar.SECOND, 1); cal.setTimeZone(TimeZone.getTimeZone("US/Pacific")); - Timestamp ts = new Timestamp(cal.getTimeInMillis()); - ts.setNanos(1); + Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 1); /** * 17:00 PDT = 00:00 GMT (daylight-savings) diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java index 406ceceed1..915d040f17 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java @@ -19,10 +19,10 @@ import static org.junit.Assert.*; -import java.sql.Date; import java.util.HashMap; import java.util.Map; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.junit.Test; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java index 9f20ff656b..e1f970673c 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hive.ql.udf; -import java.sql.Timestamp; import java.time.Instant; import java.time.ZoneId; -import java.time.ZoneOffset; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; @@ -37,56 +36,56 @@ public void testTimestampToTimestampWithGranularity() throws Exception { // Running example // Friday 30th August 1985 02:47:02 AM - final TimestampWritable t = new TimestampWritable(new Timestamp(494243222000L)); + final TimestampWritable t = new TimestampWritable(Timestamp.ofEpochMilli(494243222000L)); UDFDateFloor g; // Year granularity // Tuesday 1st January 1985 12:00:00 AM g = new UDFDateFloorYear(); TimestampWritable i1 = g.evaluate(t); - assertEquals(473414400000L, i1.getTimestamp().getTime()); + assertEquals(473414400000L, i1.getTimestamp().getMillis()); // Quarter granularity // Monday 1st July 1985 12:00:00 AM g = new UDFDateFloorQuarter(); TimestampWritable i2 = g.evaluate(t); - 
assertEquals(489049200000L, i2.getTimestamp().getTime()); + assertEquals(489049200000L, i2.getTimestamp().getMillis()); // Month granularity // Thursday 1st August 1985 12:00:00 AM g = new UDFDateFloorMonth(); TimestampWritable i3 = g.evaluate(t); - assertEquals(491727600000L, i3.getTimestamp().getTime()); + assertEquals(491727600000L, i3.getTimestamp().getMillis()); // Week granularity // Monday 26th August 1985 12:00:00 AM g = new UDFDateFloorWeek(); TimestampWritable i4 = g.evaluate(t); - assertEquals(493887600000L, i4.getTimestamp().getTime()); + assertEquals(493887600000L, i4.getTimestamp().getMillis()); // Day granularity // Friday 30th August 1985 12:00:00 AM g = new UDFDateFloorDay(); TimestampWritable i5 = g.evaluate(t); - assertEquals(494233200000L, i5.getTimestamp().getTime()); + assertEquals(494233200000L, i5.getTimestamp().getMillis()); // Hour granularity // Friday 30th August 1985 02:00:00 AM g = new UDFDateFloorHour(); TimestampWritable i6 = g.evaluate(t); - assertEquals(494240400000L, i6.getTimestamp().getTime()); + assertEquals(494240400000L, i6.getTimestamp().getMillis()); // Minute granularity // Friday 30th August 1985 02:47:00 AM g = new UDFDateFloorMinute(); TimestampWritable i7 = g.evaluate(t); - assertEquals(494243220000L, i7.getTimestamp().getTime()); + assertEquals(494243220000L, i7.getTimestamp().getMillis()); // Second granularity // Friday 30th August 1985 02:47:02 AM g = new UDFDateFloorSecond(); TimestampWritable i8 = g.evaluate(t); - assertEquals(494243222000L, i8.getTimestamp().getTime()); + assertEquals(494243222000L, i8.getTimestamp().getMillis()); } @Test diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java index 0acb46db16..26876cb434 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java @@ -18,22 +18,22 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; - -import junit.framework.TestCase; +import java.time.LocalDateTime; +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDate; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; +import junit.framework.TestCase; + public class TestGenericUDFDate extends TestCase { public void testStringToDate() throws HiveException { GenericUDFDate udf = new GenericUDFDate(); @@ -59,8 +59,8 @@ public void testTimestampToDate() throws HiveException { ObjectInspector[] arguments = {valueOI}; udf.initialize(arguments); - DeferredObject valueObj = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06, - 30, 4, 17, 52, 0))); + DeferredObject valueObj = new DeferredJavaObject(new TimestampWritable( + new Timestamp(LocalDateTime.of(109, 06, 30, 4, 17, 52, 0)))); DeferredObject[] args = {valueObj}; DateWritable output = 
(DateWritable) udf.evaluate(args); @@ -78,7 +78,7 @@ public void testDateWritablepToDate() throws HiveException { ObjectInspector[] arguments = {valueOI}; udf.initialize(arguments); - DeferredObject valueObj = new DeferredJavaObject(new DateWritable(new Date(109, 06, 30))); + DeferredObject valueObj = new DeferredJavaObject(new DateWritable(Date.of(109, 06, 30))); DeferredObject[] args = {valueObj}; DateWritable output = (DateWritable) udf.evaluate(args); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java index 9caf3b77e6..952c2274a0 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java @@ -18,21 +18,21 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; - -import junit.framework.TestCase; +import java.time.LocalDateTime; +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; +import junit.framework.TestCase; + public class TestGenericUDFDateAdd extends TestCase { public void testStringToDate() throws HiveException { GenericUDFDateAdd udf = new GenericUDFDateAdd(); @@ -66,8 +66,8 @@ public void testTimestampToDate() throws HiveException { ObjectInspector[] arguments = {valueOI1, valueOI2}; udf.initialize(arguments); - DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06, - 20, 4, 17, 52, 0))); + DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable( + new Timestamp(LocalDateTime.of(109, 06, 20, 4, 17, 52, 0)))); DeferredObject valueObj2 = new DeferredJavaObject(new Integer("3")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritable output = (DateWritable) udf.evaluate(args); @@ -93,7 +93,7 @@ public void testDateWritablepToDate() throws HiveException { udf.initialize(arguments); - DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20))); + DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(Date.of(109, 06, 20))); DeferredObject valueObj2 = new DeferredJavaObject(new Integer("4")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritable output = (DateWritable) udf.evaluate(args); @@ -118,7 +118,7 @@ public void testByteDataTypeAsDays() throws HiveException { ObjectInspector[] arguments = {valueOI1, valueOI2}; udf.initialize(arguments); - DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20))); + DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(Date.of(109, 06, 20))); DeferredObject valueObj2 = new DeferredJavaObject(new Byte("4")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritable output = (DateWritable) udf.evaluate(args); @@ -133,7 +133,7 @@ public void testShortDataTypeAsDays() throws HiveException { 
ObjectInspector[] arguments = {valueOI1, valueOI2}; udf.initialize(arguments); - DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20))); + DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(Date.of(109, 06, 20))); DeferredObject valueObj2 = new DeferredJavaObject(new Short("4")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritable output = (DateWritable) udf.evaluate(args); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java index 3f4ea3f988..ced07b1eeb 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java @@ -18,15 +18,13 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; - -import junit.framework.TestCase; +import java.time.LocalDateTime; +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateDiff; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -34,6 +32,8 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; +import junit.framework.TestCase; + public class TestGenericUDFDateDiff extends TestCase { public void testStringToDate() throws HiveException { GenericUDFDateDiff udf = new GenericUDFDateDiff(); @@ -67,10 +67,10 @@ public void testTimestampToDate() throws HiveException { ObjectInspector[] arguments = {valueOI1, valueOI2}; udf.initialize(arguments); - DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06, - 20, 0, 0, 0, 0))); - DeferredObject valueObj2 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06, - 17, 0, 0, 0, 0))); + DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable( + new Timestamp(LocalDateTime.of(109, 06, 20, 0, 0, 0, 0)))); + DeferredObject valueObj2 = new DeferredJavaObject(new TimestampWritable( + new Timestamp(LocalDateTime.of(109, 06, 17, 0, 0, 0, 0)))); DeferredObject[] args = {valueObj1, valueObj2}; IntWritable output = (IntWritable) udf.evaluate(args); @@ -95,8 +95,8 @@ public void testDateWritablepToDate() throws HiveException { udf.initialize(arguments); - DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20))); - DeferredObject valueObj2 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 10))); + DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(Date.of(109, 06, 20))); + DeferredObject valueObj2 = new DeferredJavaObject(new DateWritable(Date.of(109, 06, 10))); DeferredObject[] args = {valueObj1, valueObj2}; IntWritable output = (IntWritable) udf.evaluate(args); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java index d29d964bd0..a630e9f772 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java +++ 
b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; - import junit.framework.TestCase; +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java index cb00cfd189..21dcc04f2b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java @@ -18,21 +18,21 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; - -import junit.framework.TestCase; +import java.time.LocalDateTime; +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateSub; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; +import junit.framework.TestCase; + public class TestGenericUDFDateSub extends TestCase { public void testStringToDate() throws HiveException { GenericUDFDateSub udf = new GenericUDFDateSub(); @@ -66,8 +66,8 @@ public void testTimestampToDate() throws HiveException { ObjectInspector[] arguments = {valueOI1, valueOI2}; udf.initialize(arguments); - DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06, - 20, 4, 17, 52, 0))); + DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable( + new Timestamp(LocalDateTime.of(109, 06, 20, 4, 17, 52, 0)))); DeferredObject valueObj2 = new DeferredJavaObject(new Integer("3")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritable output = (DateWritable) udf.evaluate(args); @@ -93,7 +93,7 @@ public void testDateWritablepToDate() throws HiveException { udf.initialize(arguments); - DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20))); + DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(Date.of(109, 06, 20))); DeferredObject valueObj2 = new DeferredJavaObject(new Integer("4")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritable output = (DateWritable) udf.evaluate(args); @@ -118,7 +118,7 @@ public void testByteDataTypeAsDays() throws HiveException { ObjectInspector[] arguments = {valueOI1, valueOI2}; udf.initialize(arguments); - DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20))); + DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(Date.of(109, 06, 20))); DeferredObject valueObj2 = new DeferredJavaObject(new Byte("4")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritable output = (DateWritable) udf.evaluate(args); 
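Editor's aside: the GenericUDFDate/DateAdd/DateDiff/DateSub tests above all follow one shape once the deprecated java.sql constructors are gone: build the argument writables from the Hive Date/Timestamp types, wrap them in DeferredJavaObject, and evaluate. A condensed sketch, assuming the factory methods used in this patch (Date.of, new Timestamp(LocalDateTime)); the object inspectors and argument values are illustrative and not taken from any particular test.

    import java.time.LocalDateTime;
    import org.apache.hadoop.hive.common.type.Date;
    import org.apache.hadoop.hive.common.type.Timestamp;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
    import org.apache.hadoop.hive.serde2.io.DateWritable;
    import org.apache.hadoop.hive.serde2.io.TimestampWritable;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class DateAddSketch {
      public static void main(String[] args) throws HiveException {
        GenericUDFDateAdd udf = new GenericUDFDateAdd();
        ObjectInspector dateOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
        ObjectInspector intOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
        udf.initialize(new ObjectInspector[] {dateOI, intOI});

        // Date.of(year, month, day) replaces the deprecated new java.sql.Date(year - 1900, month - 1, day).
        DeferredObject dateArg = new DeferredJavaObject(new DateWritable(Date.of(2009, 6, 20)));
        DeferredObject daysArg = new DeferredJavaObject(Integer.valueOf(3));

        DateWritable out = (DateWritable) udf.evaluate(new DeferredObject[] {dateArg, daysArg});
        System.out.println(out);

        // Timestamp arguments are built from LocalDateTime rather than the
        // offset-based java.sql.Timestamp(year - 1900, month - 1, ...) constructor.
        Timestamp ts = new Timestamp(LocalDateTime.of(2009, 6, 20, 4, 17, 52));
        System.out.println(new TimestampWritable(ts));
      }
    }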
@@ -133,7 +133,7 @@ public void testShortDataTypeAsDays() throws HiveException { ObjectInspector[] arguments = {valueOI1, valueOI2}; udf.initialize(arguments); - DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20))); + DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(Date.of(109, 06, 20))); DeferredObject valueObj2 = new DeferredJavaObject(new Short("4")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritable output = (DateWritable) udf.evaluate(args); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java index 4677aa7409..50b479bb93 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.udf.generic; import junit.framework.TestCase; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; @@ -32,8 +33,6 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; -import java.sql.Date; - public class TestGenericUDFGreatest extends TestCase { public void testOneArg() throws HiveException { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java index 7d7c84da72..41db2032ab 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java @@ -17,10 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Timestamp; - -import junit.framework.TestCase; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; @@ -29,6 +26,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; +import junit.framework.TestCase; + public class TestGenericUDFLastDay extends TestCase { public void testLastDay() throws HiveException { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java index f966cb06d3..dee1797e7f 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java @@ -17,10 +17,9 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; - import junit.framework.TestCase; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java index 7eee550688..4ef12881c2 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java +++ 
b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java @@ -17,9 +17,8 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java index efc951483c..ae855c5d6b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java @@ -18,12 +18,11 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.session.SessionState; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java index 5350a00e73..d2d22b9841 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java @@ -18,12 +18,11 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.session.SessionState; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java index 4b09aa15d5..fc090bf492 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; - import junit.framework.TestCase; +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSortArray.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSortArray.java index fcdb49cc27..13cca22ef0 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSortArray.java +++ 
b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSortArray.java @@ -18,12 +18,12 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import static java.util.Arrays.asList; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; @@ -93,22 +93,22 @@ public void testSortStruct() throws HiveException { udf.initialize(inputOIs); Object i1 = asList(new Text("a"), new DoubleWritable(3.1415), - new DateWritable(new Date(2015, 5, 26)), + new DateWritable(Date.of(2015, 5, 26)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4))); Object i2 = asList(new Text("b"), new DoubleWritable(3.14), - new DateWritable(new Date(2015, 5, 26)), + new DateWritable(Date.of(2015, 5, 26)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4))); Object i3 = asList(new Text("a"), new DoubleWritable(3.1415), - new DateWritable(new Date(2015, 5, 25)), + new DateWritable(Date.of(2015, 5, 25)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(5))); Object i4 = asList(new Text("a"), new DoubleWritable(3.1415), - new DateWritable(new Date(2015, 5, 25)), + new DateWritable(Date.of(2015, 5, 25)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4))); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java index d840238ce5..f83bfe39c2 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java @@ -18,9 +18,8 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; @@ -66,12 +65,12 @@ public void testTimestamp() throws HiveException { Timestamp ts = Timestamp.valueOf("1970-01-01 00:00:00"); runAndVerify(udf, new TimestampWritable(ts), - new LongWritable(ts.getTime() / 1000)); + new LongWritable(ts.getSeconds())); ts = Timestamp.valueOf("2001-02-03 01:02:03"); runAndVerify(udf, new TimestampWritable(ts), - new LongWritable(ts.getTime() / 1000)); + new LongWritable(ts.getSeconds())); // test null values runAndVerify(udf, null, null); @@ -86,7 +85,7 @@ public void testDate() throws HiveException { Date date = Date.valueOf("1970-01-01"); runAndVerify(udf, new DateWritable(date), - new LongWritable(date.getTime() / 1000)); + new LongWritable(date.getSeconds())); // test null values runAndVerify(udf, null, null); @@ -101,7 +100,7 @@ public void testString() throws HiveException { String val = "2001-01-01 01:02:03"; runAndVerify(udf1, new Text(val), - new LongWritable(Timestamp.valueOf(val).getTime() / 1000)); + new LongWritable(Timestamp.valueOf(val).getSeconds())); // test null values runAndVerify(udf1, null, null); @@ -116,7 +115,7 @@ public void testString() throws HiveException { runAndVerify(udf2, new Text(val), new Text(format), - 
new LongWritable(Date.valueOf(val).getTime() / 1000)); + new LongWritable(Date.valueOf(val).getSeconds())); // test null values runAndVerify(udf2, null, null, null); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java index 0d524d31f3..a7389e949b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java @@ -18,9 +18,8 @@ package org.apache.hadoop.hive.ql.udf.generic; -import java.sql.Date; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java b/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java index 85bb9b3015..4005fe42ef 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hive.ql.util; -import java.sql.Date; -import java.sql.Timestamp; import java.util.TimeZone; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.ql.util.DateTimeMath; import org.junit.*; diff --git a/ql/src/test/queries/clientpositive/date_udf.q b/ql/src/test/queries/clientpositive/date_udf.q index 3b8226554f..caf120e1af 100644 --- a/ql/src/test/queries/clientpositive/date_udf.q +++ b/ql/src/test/queries/clientpositive/date_udf.q @@ -20,7 +20,7 @@ create table date_udf_flight ( LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_udf_flight; -- Test UDFs with date input -select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d), +select unix_timestamp(cast(d as timestamp with local time zone)), unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d), weekofyear(d), to_date(d) from date_udf; diff --git a/ql/src/test/queries/clientpositive/localtimezone.q b/ql/src/test/queries/clientpositive/localtimezone.q index 27b036bab3..7456972aa2 100644 --- a/ql/src/test/queries/clientpositive/localtimezone.q +++ b/ql/src/test/queries/clientpositive/localtimezone.q @@ -5,35 +5,35 @@ drop table `timestamptz_test`; create table `date_test` (`mydate1` date); insert into `date_test` VALUES - ('2011-01-01 01:01:01.123'), - ('2011-01-01 01:01:01.123 Europe/Rome'), - ('2011-01-01 01:01:01.123 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912'), - ('2011-01-01 01:01:01.12345678912 Europe/Rome'), - ('2011-01-01 01:01:01.12345678912 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912 xyz'); + (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local 
time zone)), + (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone)); create table `timestamp_test` (`mydate1` timestamp); insert into `timestamp_test` VALUES - ('2011-01-01 01:01:01.123'), - ('2011-01-01 01:01:01.123 Europe/Rome'), - ('2011-01-01 01:01:01.123 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912'), - ('2011-01-01 01:01:01.12345678912 Europe/Rome'), - ('2011-01-01 01:01:01.12345678912 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912 xyz'); + (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone)); create table `timestamptz_test` (`mydate1` timestamp with local time zone); insert into `timestamptz_test` VALUES - ('2011-01-01 01:01:01.123'), - ('2011-01-01 01:01:01.123 Europe/Rome'), - ('2011-01-01 01:01:01.123 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912'), - ('2011-01-01 01:01:01.12345678912 Europe/Rome'), - ('2011-01-01 01:01:01.12345678912 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912 xyz'); + (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone)); select * from `date_test`; select * from `timestamp_test`; diff --git a/ql/src/test/queries/clientpositive/timestamp_comparison2.q b/ql/src/test/queries/clientpositive/timestamp_comparison2.q index d41cc83dbe..a93849c109 100644 --- a/ql/src/test/queries/clientpositive/timestamp_comparison2.q +++ b/ql/src/test/queries/clientpositive/timestamp_comparison2.q @@ -16,8 +16,8 @@ FROM alltypesorc WHERE ((ctinyint != 0) AND - (((ctimestamp1 <= timestamp('1969-12-31 16:00:00')) + (((ctimestamp1 <= timestamp('1970-01-01 00:00:00')) OR ((ctinyint = cint) OR (cstring2 LIKE 'ss'))) AND ((988888 < cdouble) - OR ((ctimestamp2 > timestamp('1969-12-31 07:55:29')) AND (3569 >= cdouble))))) + OR ((ctimestamp2 > timestamp('1969-12-31 15:55:29')) AND (3569 >= cdouble))))) ; diff --git a/ql/src/test/queries/clientpositive/timestamp_dst.q b/ql/src/test/queries/clientpositive/timestamp_dst.q new file mode 100644 index 0000000000..4dda5a9d28 --- /dev/null +++ b/ql/src/test/queries/clientpositive/timestamp_dst.q @@ -0,0 +1,2 @@ +select TIMESTAMP '2015-03-08 02:10:00.101'; + diff --git a/ql/src/test/queries/clientpositive/vectorization_decimal_date.q b/ql/src/test/queries/clientpositive/vectorization_decimal_date.q index 29c025c632..2ac5f96178 100644 --- a/ql/src/test/queries/clientpositive/vectorization_decimal_date.q +++ b/ql/src/test/queries/clientpositive/vectorization_decimal_date.q @@ -3,5 +3,6 @@ set hive.fetch.task.conversion=none; CREATE TABLE date_decimal_test STORED AS ORC AS SELECT cint, cdouble, 
CAST (CAST (cint AS TIMESTAMP) AS DATE) AS cdate, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal FROM alltypesorc; SET hive.vectorized.execution.enabled=true; -EXPLAIN VECTORIZATION EXPRESSION SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10; -SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10; +EXPLAIN VECTORIZATION EXPRESSION SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10; +-- 528534767 is 'Wednesday, January 7, 1970 2:48:54 AM' +SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10; diff --git a/ql/src/test/results/clientpositive/constprog_type.q.out b/ql/src/test/results/clientpositive/constprog_type.q.out index 6c300855a1..5814b93f1f 100644 --- a/ql/src/test/results/clientpositive/constprog_type.q.out +++ b/ql/src/test/results/clientpositive/constprog_type.q.out @@ -35,7 +35,7 @@ STAGE PLANS: Row Limit Per Split: 1 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - expressions: DATE'2013-11-17' (type: date), TIMESTAMP'2011-04-29 20:46:56.4485' (type: timestamp) + expressions: DATE'2013-11-17' (type: date), TIMESTAMP'2011-04-30 03:46:56.4485' (type: timestamp) outputColumnNames: _col0, _col1 Statistics: Num rows: 500 Data size: 48000 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator @@ -122,7 +122,7 @@ POSTHOOK: query: SELECT * FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 #### A masked pattern was here #### -2013-11-17 2011-04-29 20:46:56.4485 +2013-11-17 2011-04-30 03:46:56.4485 PREHOOK: query: SELECT key, value FROM src WHERE key = cast(86 as double) PREHOOK: type: QUERY PREHOOK: Input: default@src diff --git a/ql/src/test/results/clientpositive/date_udf.q.out b/ql/src/test/results/clientpositive/date_udf.q.out index 37ad29e8ed..7681a50353 100644 --- a/ql/src/test/results/clientpositive/date_udf.q.out +++ b/ql/src/test/results/clientpositive/date_udf.q.out @@ -74,19 +74,19 @@ POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt.1' OV POSTHOOK: type: LOAD #### A masked pattern was here #### POSTHOOK: Output: default@date_udf_flight -PREHOOK: query: select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d), +PREHOOK: query: select unix_timestamp(cast(d as timestamp with local time zone)), unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d), weekofyear(d), to_date(d) from date_udf PREHOOK: type: QUERY PREHOOK: Input: default@date_udf #### A masked pattern was here #### -POSTHOOK: query: select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d), +POSTHOOK: query: select unix_timestamp(cast(d as timestamp with local time zone)), unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d), weekofyear(d), to_date(d) from date_udf POSTHOOK: type: QUERY POSTHOOK: Input: default@date_udf #### A masked pattern was here #### -1304665200 2011 5 6 6 18 2011-05-06 +1304665200 1304640000 2011 5 6 6 18 2011-05-06 PREHOOK: query: select date_add(d, 5), date_sub(d, 10) from date_udf PREHOOK: type: QUERY diff --git a/ql/src/test/results/clientpositive/decimal_1.q.out b/ql/src/test/results/clientpositive/decimal_1.q.out index 17090f7059..7d8f5e4df3 100644 --- a/ql/src/test/results/clientpositive/decimal_1.q.out +++ b/ql/src/test/results/clientpositive/decimal_1.q.out @@ -120,7 +120,7 @@ POSTHOOK: query: select cast(t as timestamp) from decimal_1 
POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -1969-12-31 16:00:17.29 +1970-01-01 00:00:17.29 PREHOOK: query: drop table decimal_1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@decimal_1 diff --git a/ql/src/test/results/clientpositive/decimal_2.q.out b/ql/src/test/results/clientpositive/decimal_2.q.out index f3168f6f50..826431dc70 100644 --- a/ql/src/test/results/clientpositive/decimal_2.q.out +++ b/ql/src/test/results/clientpositive/decimal_2.q.out @@ -210,7 +210,7 @@ POSTHOOK: query: select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_2 #### A masked pattern was here #### -1355944339.1234567 +1355915539.1234567 PREHOOK: query: select cast(true as decimal) from decimal_2 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_2 diff --git a/ql/src/test/results/clientpositive/localtimezone.q.out b/ql/src/test/results/clientpositive/localtimezone.q.out index a76e4a3db4..b4c6d86247 100644 --- a/ql/src/test/results/clientpositive/localtimezone.q.out +++ b/ql/src/test/results/clientpositive/localtimezone.q.out @@ -19,24 +19,24 @@ POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@date_test PREHOOK: query: insert into `date_test` VALUES - ('2011-01-01 01:01:01.123'), - ('2011-01-01 01:01:01.123 Europe/Rome'), - ('2011-01-01 01:01:01.123 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912'), - ('2011-01-01 01:01:01.12345678912 Europe/Rome'), - ('2011-01-01 01:01:01.12345678912 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912 xyz') + (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone)) PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table PREHOOK: Output: default@date_test POSTHOOK: query: insert into `date_test` VALUES - ('2011-01-01 01:01:01.123'), - ('2011-01-01 01:01:01.123 Europe/Rome'), - ('2011-01-01 01:01:01.123 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912'), - ('2011-01-01 01:01:01.12345678912 Europe/Rome'), - ('2011-01-01 01:01:01.12345678912 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912 xyz') + (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone)) POSTHOOK: type: QUERY POSTHOOK: Input: _dummy_database@_dummy_table POSTHOOK: Output: default@date_test @@ -50,24 +50,24 @@ POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@timestamp_test PREHOOK: query: insert into `timestamp_test` VALUES - ('2011-01-01 01:01:01.123'), - ('2011-01-01 01:01:01.123 Europe/Rome'), - 
('2011-01-01 01:01:01.123 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912'), - ('2011-01-01 01:01:01.12345678912 Europe/Rome'), - ('2011-01-01 01:01:01.12345678912 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912 xyz') + (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone)) PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table PREHOOK: Output: default@timestamp_test POSTHOOK: query: insert into `timestamp_test` VALUES - ('2011-01-01 01:01:01.123'), - ('2011-01-01 01:01:01.123 Europe/Rome'), - ('2011-01-01 01:01:01.123 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912'), - ('2011-01-01 01:01:01.12345678912 Europe/Rome'), - ('2011-01-01 01:01:01.12345678912 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912 xyz') + (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone)) POSTHOOK: type: QUERY POSTHOOK: Input: _dummy_database@_dummy_table POSTHOOK: Output: default@timestamp_test @@ -81,24 +81,24 @@ POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@timestamptz_test PREHOOK: query: insert into `timestamptz_test` VALUES - ('2011-01-01 01:01:01.123'), - ('2011-01-01 01:01:01.123 Europe/Rome'), - ('2011-01-01 01:01:01.123 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912'), - ('2011-01-01 01:01:01.12345678912 Europe/Rome'), - ('2011-01-01 01:01:01.12345678912 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912 xyz') + (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone)) PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table PREHOOK: Output: default@timestamptz_test POSTHOOK: query: insert into `timestamptz_test` VALUES - ('2011-01-01 01:01:01.123'), - ('2011-01-01 01:01:01.123 Europe/Rome'), - ('2011-01-01 01:01:01.123 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912'), - ('2011-01-01 01:01:01.12345678912 Europe/Rome'), - ('2011-01-01 01:01:01.12345678912 GMT-05:00'), - ('2011-01-01 01:01:01.12345678912 xyz') + (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 
Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)), + (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone)) POSTHOOK: type: QUERY POSTHOOK: Input: _dummy_database@_dummy_table POSTHOOK: Output: default@timestamptz_test diff --git a/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out b/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out index 485bfe7506..5a8c069b98 100644 --- a/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out +++ b/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out @@ -89,13 +89,13 @@ POSTHOOK: query: SELECT cdate, cdecimal from date_decimal_test_parquet where cin POSTHOOK: type: QUERY POSTHOOK: Input: default@date_decimal_test_parquet #### A masked pattern was here #### -1970-01-06 -7959.5837837838 -1970-01-06 -2516.4135135135 -1970-01-06 -9445.0621621622 -1970-01-06 -5713.7459459459 -1970-01-06 8963.6405405405 -1970-01-06 4193.6243243243 -1970-01-06 2964.3864864865 -1970-01-06 -4673.2540540541 -1970-01-06 -9216.8945945946 -1970-01-06 -9287.3756756757 +1970-01-07 -7959.5837837838 +1970-01-07 -2516.4135135135 +1970-01-07 -9445.0621621622 +1970-01-07 -5713.7459459459 +1970-01-07 8963.6405405405 +1970-01-07 4193.6243243243 +1970-01-07 2964.3864864865 +1970-01-07 -4673.2540540541 +1970-01-07 -9216.8945945946 +1970-01-07 -9287.3756756757 diff --git a/ql/src/test/results/clientpositive/timestamp_1.q.out b/ql/src/test/results/clientpositive/timestamp_1.q.out index d3ca5cfdf6..1b88d3da47 100644 --- a/ql/src/test/results/clientpositive/timestamp_1.q.out +++ b/ql/src/test/results/clientpositive/timestamp_1.q.out @@ -46,7 +46,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -55,7 +55,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -64,7 +64,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -73,7 +73,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -82,7 +82,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_1 
limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -91,7 +91,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.293872461E9 +1.293843661E9 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -129,7 +129,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -138,7 +138,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -147,7 +147,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -156,7 +156,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -165,7 +165,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -174,7 +174,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.293872461E9 +1.293843661E9 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -212,7 +212,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -221,7 +221,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -230,7 +230,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -239,7 +239,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as 
float) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -248,7 +248,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -257,7 +257,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.2938724611E9 +1.2938436611E9 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -295,7 +295,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -304,7 +304,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -313,7 +313,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -322,7 +322,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -331,7 +331,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -340,7 +340,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.2938724610001E9 +1.2938436610001E9 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -378,7 +378,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -387,7 +387,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -396,7 +396,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 
+1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -405,7 +405,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -414,7 +414,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -423,7 +423,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.2938724610001E9 +1.2938436610001E9 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -461,7 +461,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -470,7 +470,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -479,7 +479,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -488,7 +488,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -497,7 +497,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 @@ -506,7 +506,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_1 #### A masked pattern was here #### -1.293872461001E9 +1.293843661001E9 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_1 diff --git a/ql/src/test/results/clientpositive/timestamp_2.q.out b/ql/src/test/results/clientpositive/timestamp_2.q.out index f9bfb0937e..15c8b76f6c 100644 --- a/ql/src/test/results/clientpositive/timestamp_2.q.out +++ b/ql/src/test/results/clientpositive/timestamp_2.q.out @@ -46,7 +46,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here 
#### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -55,7 +55,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -64,7 +64,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -73,7 +73,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -82,7 +82,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -91,7 +91,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.293872461E9 +1.293843661E9 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -129,7 +129,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -138,7 +138,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -147,7 +147,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -156,7 +156,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -165,7 +165,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -174,7 +174,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked 
pattern was here #### -1.293872461E9 +1.293843661E9 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -212,7 +212,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -221,7 +221,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -230,7 +230,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -239,7 +239,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -248,7 +248,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -257,7 +257,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.2938724611E9 +1.2938436611E9 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -295,7 +295,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -304,7 +304,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -313,7 +313,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -322,7 +322,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -331,7 +331,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: 
default@timestamp_2 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -340,7 +340,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.2938724610001E9 +1.2938436610001E9 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -378,7 +378,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -387,7 +387,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -396,7 +396,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -405,7 +405,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -414,7 +414,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -423,7 +423,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.2938724610001E9 +1.2938436610001E9 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -461,7 +461,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -77 +-51 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -470,7 +470,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### --4787 +31949 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -479,7 +479,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -488,7 +488,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 
limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1293872461 +1293843661 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -497,7 +497,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.29387251E9 +1.29384371E9 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 @@ -506,7 +506,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_2 #### A masked pattern was here #### -1.293872461001E9 +1.293843661001E9 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_2 diff --git a/ql/src/test/results/clientpositive/timestamp_3.q.out b/ql/src/test/results/clientpositive/timestamp_3.q.out index 0664abf658..9218501d39 100644 --- a/ql/src/test/results/clientpositive/timestamp_3.q.out +++ b/ql/src/test/results/clientpositive/timestamp_3.q.out @@ -100,7 +100,7 @@ POSTHOOK: query: select cast(t as string) from timestamp_3 limit 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_3 #### A masked pattern was here #### -2011-04-29 20:46:56.4485 +2011-04-30 03:46:56.4485 PREHOOK: query: select t, sum(t), count(*), sum(t)/count(*), avg(t) from timestamp_3 group by t PREHOOK: type: QUERY PREHOOK: Input: default@timestamp_3 @@ -109,7 +109,7 @@ POSTHOOK: query: select t, sum(t), count(*), sum(t)/count(*), avg(t) from timest POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_3 #### A masked pattern was here #### -2011-04-29 20:46:56.4485 1.3041352164485E9 1 1.3041352164485E9 1.3041352164485E9 +2011-04-30 03:46:56.4485 1.3041352164485E9 1 1.3041352164485E9 1.3041352164485E9 PREHOOK: query: drop table timestamp_3 PREHOOK: type: DROPTABLE PREHOOK: Input: default@timestamp_3 diff --git a/ql/src/test/results/clientpositive/timestamp_comparison2.q.out b/ql/src/test/results/clientpositive/timestamp_comparison2.q.out index 8ef2552cb9..7c83874859 100644 --- a/ql/src/test/results/clientpositive/timestamp_comparison2.q.out +++ b/ql/src/test/results/clientpositive/timestamp_comparison2.q.out @@ -28,10 +28,10 @@ FROM alltypesorc WHERE ((ctinyint != 0) AND - (((ctimestamp1 <= timestamp('1969-12-31 16:00:00')) + (((ctimestamp1 <= timestamp('1970-01-01 00:00:00')) OR ((ctinyint = cint) OR (cstring2 LIKE 'ss'))) AND ((988888 < cdouble) - OR ((ctimestamp2 > timestamp('1969-12-31 07:55:29')) AND (3569 >= cdouble))))) + OR ((ctimestamp2 > timestamp('1969-12-31 15:55:29')) AND (3569 >= cdouble))))) PREHOOK: type: QUERY PREHOOK: Input: default@alltypesorc #### A masked pattern was here #### @@ -40,10 +40,10 @@ FROM alltypesorc WHERE ((ctinyint != 0) AND - (((ctimestamp1 <= timestamp('1969-12-31 16:00:00')) + (((ctimestamp1 <= timestamp('1970-01-01 00:00:00')) OR ((ctinyint = cint) OR (cstring2 LIKE 'ss'))) AND ((988888 < cdouble) - OR ((ctimestamp2 > timestamp('1969-12-31 07:55:29')) AND (3569 >= cdouble))))) + OR ((ctimestamp2 > timestamp('1969-12-31 15:55:29')) AND (3569 >= cdouble))))) POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/timestamp_dst.q.out b/ql/src/test/results/clientpositive/timestamp_dst.q.out new file mode 100644 index 0000000000..72a5ebf63e --- /dev/null +++ 
b/ql/src/test/results/clientpositive/timestamp_dst.q.out @@ -0,0 +1,9 @@ +PREHOOK: query: select TIMESTAMP '2015-03-08 02:10:00.101' +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select TIMESTAMP '2015-03-08 02:10:00.101' +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +2015-03-08 02:10:00.101 diff --git a/ql/src/test/results/clientpositive/timestamp_ints_casts.q.out b/ql/src/test/results/clientpositive/timestamp_ints_casts.q.out index 196607807d..9924b7d0ab 100644 --- a/ql/src/test/results/clientpositive/timestamp_ints_casts.q.out +++ b/ql/src/test/results/clientpositive/timestamp_ints_casts.q.out @@ -104,32 +104,32 @@ where cbigint % 250 = 0 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -1969-12-31 15:59:59.964 1969-12-31 15:59:59.8 NULL 1969-12-08 10:43:03.25 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.748 NULL NULL -1969-12-31 15:59:59.964 1969-12-31 15:59:59.8 NULL 1970-01-19 04:24:39 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.817 NULL NULL -1969-12-31 15:59:59.97 1969-12-31 15:59:59.8 NULL 1970-01-17 05:10:52.25 1969-12-31 15:59:30 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 16:00:12.935 NULL NULL -1969-12-31 15:59:59.949 NULL 1970-01-09 14:53:20.971 1970-01-12 20:45:23.25 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL -1969-12-31 15:59:59.949 NULL 1970-01-09 07:39:13.882 1969-12-09 07:45:32.75 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL -1969-12-31 16:00:00.02 1969-12-31 16:00:15.601 NULL 1969-12-27 11:19:26.75 1969-12-31 16:00:20 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.129 NULL NULL -1969-12-31 15:59:59.962 1969-12-31 16:00:15.601 NULL 1969-12-10 03:41:51 1969-12-31 15:59:22 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:58.614 NULL NULL -1969-12-31 15:59:59.995 1969-12-31 16:00:15.601 NULL 1970-01-07 18:06:56 1969-12-31 15:59:55 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 16:00:04.679 NULL NULL -1969-12-31 16:00:00.048 1969-12-31 16:00:15.601 NULL 1969-12-22 11:03:59 1969-12-31 16:00:48 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:50.235 NULL NULL -1969-12-31 16:00:00.008 NULL 1969-12-24 00:12:58.862 1969-12-20 21:16:47.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:00.008 NULL 1969-12-30 11:24:23.566 1969-12-16 11:20:17.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:00.008 NULL 1970-01-09 23:39:39.664 1970-01-10 17:09:21.5 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:00.008 NULL 1969-12-23 21:59:27.689 1970-01-19 01:16:31.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:00.008 NULL 1970-01-10 23:29:48.972 1969-12-10 02:41:39 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:00.008 NULL 1970-01-11 10:34:27.246 1970-01-14 14:49:59.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 15:59:59.941 1969-12-31 
15:59:52.804 NULL 1969-12-13 02:11:50 1969-12-31 15:59:01 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:13.15 NULL NULL -1969-12-31 15:59:59.979 1969-12-31 15:59:52.804 NULL 1970-01-18 12:27:09 1969-12-31 15:59:39 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:55.9 NULL NULL -1969-12-31 15:59:59.94 1969-12-31 15:59:52.804 NULL 1970-01-18 05:11:54.75 1969-12-31 15:59:00 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:52.408 NULL NULL -1969-12-31 15:59:59.986 1969-12-31 15:59:52.804 NULL 1969-12-13 16:50:00.5 1969-12-31 15:59:46 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:11.065 NULL NULL -1969-12-31 16:00:00.059 1969-12-31 15:59:52.804 NULL 1969-12-18 11:57:25.5 1969-12-31 16:00:59 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.956 NULL NULL -1969-12-31 15:59:59.992 1969-12-31 15:59:52.804 NULL 1969-12-10 06:06:48.5 1969-12-31 15:59:52 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:03.136 NULL NULL -1969-12-31 16:00:00.005 1969-12-31 15:59:52.804 NULL 1969-12-19 21:53:12.5 1969-12-31 16:00:05 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.973 NULL NULL -1969-12-31 15:59:59.976 1969-12-31 15:59:52.804 NULL 1970-01-10 06:18:31 1969-12-31 15:59:36 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.145 NULL NULL -1969-12-31 15:59:59.95 1969-12-31 15:59:52.804 NULL 1969-12-19 17:33:32.75 1969-12-31 15:59:10 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:54.733 NULL NULL -1969-12-31 16:00:00.011 NULL 1969-12-30 22:03:04.018 1970-01-21 12:50:53.75 1969-12-31 16:00:11 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL -1969-12-31 16:00:00.011 NULL 1969-12-27 18:49:09.583 1970-01-14 22:35:27 1969-12-31 16:00:11 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL +1969-12-31 23:59:59.964 1969-12-31 23:59:59.8 NULL 1969-12-08 18:43:03.25 1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 23:59:45.748 NULL NULL +1969-12-31 23:59:59.964 1969-12-31 23:59:59.8 NULL 1970-01-19 12:24:39 1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 23:59:53.817 NULL NULL +1969-12-31 23:59:59.97 1969-12-31 23:59:59.8 NULL 1970-01-17 13:10:52.25 1969-12-31 23:59:30 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1970-01-01 00:00:12.935 NULL NULL +1969-12-31 23:59:59.949 NULL 1970-01-09 22:53:20.971 1970-01-13 04:45:23.25 1969-12-31 23:59:09 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:08.451 NULL NULL +1969-12-31 23:59:59.949 NULL 1970-01-09 15:39:13.882 1969-12-09 15:45:32.75 1969-12-31 23:59:09 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1970-01-01 00:00:08.451 NULL NULL +1970-01-01 00:00:00.02 1970-01-01 00:00:15.601 NULL 1969-12-27 19:19:26.75 1970-01-01 00:00:20 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 23:59:45.129 NULL NULL +1969-12-31 23:59:59.962 1970-01-01 00:00:15.601 NULL 1969-12-10 11:41:51 1969-12-31 23:59:22 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 23:59:58.614 NULL NULL +1969-12-31 23:59:59.995 1970-01-01 00:00:15.601 NULL 1970-01-08 02:06:56 1969-12-31 23:59:55 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1970-01-01 00:00:04.679 NULL NULL +1970-01-01 00:00:00.048 1970-01-01 00:00:15.601 NULL 1969-12-22 19:03:59 1970-01-01 00:00:48 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 23:59:50.235 NULL NULL +1970-01-01 00:00:00.008 NULL 1969-12-24 08:12:58.862 1969-12-21 05:16:47.25 1970-01-01 00:00:08 
NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:00.008 NULL 1969-12-30 19:24:23.566 1969-12-16 19:20:17.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:00.008 NULL 1970-01-10 07:39:39.664 1970-01-11 01:09:21.5 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:00.008 NULL 1969-12-24 05:59:27.689 1970-01-19 09:16:31.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:00.008 NULL 1970-01-11 07:29:48.972 1969-12-10 10:41:39 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:00.008 NULL 1970-01-11 18:34:27.246 1970-01-14 22:49:59.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1969-12-31 23:59:59.941 1969-12-31 23:59:52.804 NULL 1969-12-13 10:11:50 1969-12-31 23:59:01 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:13.15 NULL NULL +1969-12-31 23:59:59.979 1969-12-31 23:59:52.804 NULL 1970-01-18 20:27:09 1969-12-31 23:59:39 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:55.9 NULL NULL +1969-12-31 23:59:59.94 1969-12-31 23:59:52.804 NULL 1970-01-18 13:11:54.75 1969-12-31 23:59:00 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:52.408 NULL NULL +1969-12-31 23:59:59.986 1969-12-31 23:59:52.804 NULL 1969-12-14 00:50:00.5 1969-12-31 23:59:46 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:11.065 NULL NULL +1970-01-01 00:00:00.059 1969-12-31 23:59:52.804 NULL 1969-12-18 19:57:25.5 1970-01-01 00:00:59 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:10.956 NULL NULL +1969-12-31 23:59:59.992 1969-12-31 23:59:52.804 NULL 1969-12-10 14:06:48.5 1969-12-31 23:59:52 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:03.136 NULL NULL +1970-01-01 00:00:00.005 1969-12-31 23:59:52.804 NULL 1969-12-20 05:53:12.5 1970-01-01 00:00:05 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:10.973 NULL NULL +1969-12-31 23:59:59.976 1969-12-31 23:59:52.804 NULL 1970-01-10 14:18:31 1969-12-31 23:59:36 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:53.145 NULL NULL +1969-12-31 23:59:59.95 1969-12-31 23:59:52.804 NULL 1969-12-20 01:33:32.75 1969-12-31 23:59:10 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:54.733 NULL NULL +1970-01-01 00:00:00.011 NULL 1969-12-31 06:03:04.018 1970-01-21 20:50:53.75 1970-01-01 00:00:11 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1970-01-01 00:00:02.351 NULL NULL +1970-01-01 00:00:00.011 NULL 1969-12-28 02:49:09.583 1970-01-15 06:35:27 1970-01-01 00:00:11 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1970-01-01 00:00:02.351 NULL NULL PREHOOK: query: explain select @@ -236,29 +236,29 @@ where cbigint % 250 = 0 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1906-06-05 13:34:10 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.748 NULL NULL -1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 2020-09-11 19:50:00 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.817 NULL NULL -1969-12-31 15:59:30 1969-12-31 15:56:40 NULL 2015-04-23 22:10:50 1969-12-31 15:59:30 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 16:00:12.935 NULL NULL 
-1969-12-31 15:59:09 NULL 1994-07-07 10:09:31 2003-05-25 21:27:30 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL -1969-12-31 15:59:09 NULL 1993-09-08 22:51:22 1908-10-29 07:05:50 1969-12-31 15:59:09 NULL 1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL -1969-12-31 16:00:20 1969-12-31 20:20:01 NULL 1958-07-07 21:05:50 1969-12-31 16:00:20 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.129 NULL NULL -1969-12-31 15:59:22 1969-12-31 20:20:01 NULL 1911-02-07 01:30:00 1969-12-31 15:59:22 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:58.614 NULL NULL -1969-12-31 15:59:55 1969-12-31 20:20:01 NULL 1989-05-28 20:33:20 1969-12-31 15:59:55 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 16:00:04.679 NULL NULL -1969-12-31 16:00:48 1969-12-31 20:20:01 NULL 1944-10-18 03:23:20 1969-12-31 16:00:48 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:50.235 NULL NULL -1969-12-31 16:00:08 NULL 1949-01-13 00:21:02 1940-06-26 15:47:30 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:08 NULL 1966-09-27 07:32:46 1928-05-26 10:07:30 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:08 NULL 1995-07-07 22:01:04 1997-07-05 20:58:20 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:08 NULL 1948-10-12 08:01:29 2020-05-04 04:20:50 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:08 NULL 1998-03-27 00:56:12 1910-12-27 06:10:00 1969-12-31 16:00:08 NULL 1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 16:00:08 NULL 1999-07-01 15:14:06 2008-03-13 02:07:30 1969-12-31 16:00:08 NULL 1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL -1969-12-31 15:59:01 1969-12-31 14:00:04 NULL 1919-02-22 13:13:20 1969-12-31 15:59:01 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:13.15 NULL NULL -1969-12-31 15:59:39 1969-12-31 14:00:04 NULL 2018-11-16 20:30:00 1969-12-31 15:59:39 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:55.9 NULL NULL -1969-12-31 15:59:00 1969-12-31 14:00:04 NULL 2018-01-18 14:32:30 1969-12-31 15:59:00 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:52.408 NULL NULL -1969-12-31 15:59:46 1969-12-31 14:00:04 NULL 1920-10-24 09:28:20 1969-12-31 15:59:46 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:11.065 NULL NULL -1969-12-31 16:00:59 1969-12-31 14:00:04 NULL 1933-12-12 05:05:00 1969-12-31 16:00:59 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.956 NULL NULL -1969-12-31 15:59:52 1969-12-31 14:00:04 NULL 1911-05-18 17:28:20 1969-12-31 15:59:52 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:03.136 NULL NULL -1969-12-31 16:00:05 1969-12-31 14:00:04 NULL 1937-10-25 22:48:20 1969-12-31 16:00:05 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.973 NULL NULL -1969-12-31 15:59:36 1969-12-31 14:00:04 NULL 1996-04-09 21:36:40 1969-12-31 15:59:36 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.145 NULL NULL -1969-12-31 15:59:10 1969-12-31 14:00:04 NULL 1937-04-28 15:05:50 1969-12-31 15:59:10 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:54.733 NULL NULL -1969-12-31 16:00:11 NULL 1967-12-14 19:06:58 2027-02-19 08:15:50 1969-12-31 16:00:11 NULL 
1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL -1969-12-31 16:00:11 NULL 1959-05-16 04:19:43 2009-01-30 06:50:00 1969-12-31 16:00:11 NULL 1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL +1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1906-06-05 21:34:10 1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 23:59:45.748 NULL NULL +1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 2020-09-12 02:50:00 1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 23:59:53.817 NULL NULL +1969-12-31 23:59:30 1969-12-31 23:56:40 NULL 2015-04-24 05:10:50 1969-12-31 23:59:30 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1970-01-01 00:00:12.935 NULL NULL +1969-12-31 23:59:09 NULL 1994-07-07 17:09:31 2003-05-26 04:27:30 1969-12-31 23:59:09 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:08.451 NULL NULL +1969-12-31 23:59:09 NULL 1993-09-09 05:51:22 1908-10-29 15:05:50 1969-12-31 23:59:09 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1970-01-01 00:00:08.451 NULL NULL +1970-01-01 00:00:20 1970-01-01 04:20:01 NULL 1958-07-08 04:05:50 1970-01-01 00:00:20 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 23:59:45.129 NULL NULL +1969-12-31 23:59:22 1970-01-01 04:20:01 NULL 1911-02-07 09:30:00 1969-12-31 23:59:22 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 23:59:58.614 NULL NULL +1969-12-31 23:59:55 1970-01-01 04:20:01 NULL 1989-05-29 03:33:20 1969-12-31 23:59:55 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1970-01-01 00:00:04.679 NULL NULL +1970-01-01 00:00:48 1970-01-01 04:20:01 NULL 1944-10-18 10:23:20 1970-01-01 00:00:48 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 23:59:50.235 NULL NULL +1970-01-01 00:00:08 NULL 1949-01-13 08:21:02 1940-06-26 23:47:30 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:08 NULL 1966-09-27 14:32:46 1928-05-26 18:07:30 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:08 NULL 1995-07-08 05:01:04 1997-07-06 03:58:20 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:08 NULL 1948-10-12 15:01:29 2020-05-04 11:20:50 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:08 NULL 1998-03-27 08:56:12 1910-12-27 14:10:00 1970-01-01 00:00:08 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1970-01-01 00:00:08 NULL 1999-07-01 22:14:06 2008-03-13 09:07:30 1970-01-01 00:00:08 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1970-01-01 00:00:15.892 NULL NULL +1969-12-31 23:59:01 1969-12-31 22:00:04 NULL 1919-02-22 21:13:20 1969-12-31 23:59:01 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:13.15 NULL NULL +1969-12-31 23:59:39 1969-12-31 22:00:04 NULL 2018-11-17 04:30:00 1969-12-31 23:59:39 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:55.9 NULL NULL +1969-12-31 23:59:00 1969-12-31 22:00:04 NULL 2018-01-18 22:32:30 1969-12-31 23:59:00 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:52.408 NULL NULL +1969-12-31 23:59:46 1969-12-31 22:00:04 NULL 1920-10-24 17:28:20 1969-12-31 23:59:46 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:11.065 NULL NULL +1970-01-01 00:00:59 1969-12-31 22:00:04 NULL 1933-12-12 13:05:00 1970-01-01 00:00:59 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:10.956 NULL NULL +1969-12-31 
23:59:52 1969-12-31 22:00:04 NULL 1911-05-19 01:28:20 1969-12-31 23:59:52 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:03.136 NULL NULL +1970-01-01 00:00:05 1969-12-31 22:00:04 NULL 1937-10-26 06:48:20 1970-01-01 00:00:05 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1970-01-01 00:00:10.973 NULL NULL +1969-12-31 23:59:36 1969-12-31 22:00:04 NULL 1996-04-10 04:36:40 1969-12-31 23:59:36 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:53.145 NULL NULL +1969-12-31 23:59:10 1969-12-31 22:00:04 NULL 1937-04-28 23:05:50 1969-12-31 23:59:10 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 23:59:54.733 NULL NULL +1970-01-01 00:00:11 NULL 1967-12-15 03:06:58 2027-02-19 16:15:50 1970-01-01 00:00:11 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1970-01-01 00:00:02.351 NULL NULL +1970-01-01 00:00:11 NULL 1959-05-16 11:19:43 2009-01-30 14:50:00 1970-01-01 00:00:11 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1970-01-01 00:00:02.351 NULL NULL diff --git a/ql/src/test/results/clientpositive/timestamp_udf.q.out b/ql/src/test/results/clientpositive/timestamp_udf.q.out index 47f84cbfd7..9f705224f7 100644 --- a/ql/src/test/results/clientpositive/timestamp_udf.q.out +++ b/ql/src/test/results/clientpositive/timestamp_udf.q.out @@ -54,7 +54,7 @@ POSTHOOK: query: select unix_timestamp(t), year(t), month(t), day(t), dayofmonth POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamp_udf #### A masked pattern was here #### -1304690889 2011 5 6 6 18 7 8 9 2011-05-06 +1304665689 2011 5 6 6 18 7 8 9 2011-05-06 PREHOOK: query: select date_add(t, 5), date_sub(t, 10) from timestamp_udf PREHOOK: type: QUERY diff --git a/ql/src/test/results/clientpositive/timestamptz_3.q.out b/ql/src/test/results/clientpositive/timestamptz_3.q.out index 196c584730..68affaff5e 100644 --- a/ql/src/test/results/clientpositive/timestamptz_3.q.out +++ b/ql/src/test/results/clientpositive/timestamptz_3.q.out @@ -36,7 +36,7 @@ POSTHOOK: query: select cast(to_epoch_milli(t) as timestamp) from tstz1 POSTHOOK: type: QUERY POSTHOOK: Input: default@tstz1 #### A masked pattern was here #### -2016-01-03 12:26:34 +2016-01-03 20:26:34 PREHOOK: query: select cast(t as timestamp) from tstz1 PREHOOK: type: QUERY PREHOOK: Input: default@tstz1 @@ -54,4 +54,4 @@ POSTHOOK: query: select cast(to_epoch_milli(t) as timestamp) from tstz1 POSTHOOK: type: QUERY POSTHOOK: Input: default@tstz1 #### A masked pattern was here #### -2016-01-03 12:26:34 +2016-01-03 20:26:34 diff --git a/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out b/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out index 44e4632121..706246012a 100644 --- a/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out +++ b/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out @@ -12,9 +12,9 @@ POSTHOOK: Lineage: date_decimal_test.cdate EXPRESSION [(alltypesorc)alltypesorc. 
POSTHOOK: Lineage: date_decimal_test.cdecimal EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: date_decimal_test.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] POSTHOOK: Lineage: date_decimal_test.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] -PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10 +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10 +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10 POSTHOOK: type: QUERY PLAN VECTORIZATION: enabled: true @@ -41,12 +41,12 @@ STAGE PLANS: predicate: (cdouble is not null and cint is not null) (type: boolean) Statistics: Num rows: 12288 Data size: 1651260 Basic stats: COMPLETE Column stats: NONE Select Operator - expressions: cdate (type: date), cdecimal (type: decimal(20,10)) - outputColumnNames: _col0, _col1 + expressions: cdate (type: date), cint (type: int), cdecimal (type: decimal(20,10)) + outputColumnNames: _col0, _col1, _col2 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [2, 3] + projectedOutputColumnNums: [2, 0, 3] Statistics: Num rows: 12288 Data size: 1651260 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 10 @@ -81,21 +81,21 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10 +PREHOOK: query: SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10 PREHOOK: type: QUERY PREHOOK: Input: default@date_decimal_test #### A masked pattern was here #### -POSTHOOK: query: SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10 +POSTHOOK: query: SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@date_decimal_test #### A masked pattern was here #### -1970-01-06 -7959.5837837838 -1970-01-06 -2516.4135135135 -1970-01-06 -9445.0621621622 -1970-01-06 -5713.7459459459 -1970-01-06 8963.6405405405 -1970-01-06 4193.6243243243 -1970-01-06 2964.3864864865 -1970-01-06 -4673.2540540541 -1970-01-06 -9216.8945945946 -1970-01-06 -9287.3756756757 +1970-01-07 528534767 -7959.5837837838 +1970-01-07 528534767 -2516.4135135135 +1970-01-07 528534767 -9445.0621621622 +1970-01-07 528534767 -5713.7459459459 +1970-01-07 528534767 8963.6405405405 +1970-01-07 528534767 4193.6243243243 +1970-01-07 528534767 2964.3864864865 +1970-01-07 528534767 -4673.2540540541 +1970-01-07 528534767 -9216.8945945946 +1970-01-07 528534767 -9287.3756756757 diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java index e1ecdc1d40..71a9cfcab0 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java @@ -17,8 +17,6 @@ */ package 
org.apache.hadoop.hive.serde2; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -26,6 +24,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.Timestamp; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java index 83e5d6822e..9dabb6dd48 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hive.serde2.avro; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -28,15 +26,14 @@ import org.apache.avro.Schema; import org.apache.avro.Schema.Field; import org.apache.avro.Schema.Type; -import org.apache.avro.generic.GenericArray; import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericData.Fixed; import org.apache.avro.generic.GenericEnumSymbol; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; @@ -214,7 +211,7 @@ private Object serializePrimitive(TypeInfo typeInfo, PrimitiveObjectInspector fi case TIMESTAMP: Timestamp timestamp = ((TimestampObjectInspector) fieldOI).getPrimitiveJavaObject(structFieldData); - return timestamp.getTime(); + return timestamp.getMillis(); case UNKNOWN: throw new AvroSerdeException("Received UNKNOWN primitive category."); case VOID: diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java index 2f987bf1af..c73e7d6b0f 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java @@ -19,17 +19,17 @@ package org.apache.hadoop.hive.serde2.binarysortable.fast; import java.io.IOException; -import java.sql.Date; -import java.sql.Timestamp; import java.util.Arrays; import java.util.List; import java.util.Map; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.ByteStream.Output; import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe; import org.apache.hadoop.hive.serde2.fast.SerializeWrite; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java 
b/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java index 3aff6106eb..4d4717fade 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java @@ -19,17 +19,17 @@ package org.apache.hadoop.hive.serde2.fast; import java.io.IOException; -import java.sql.Date; -import java.sql.Timestamp; import java.util.List; import java.util.Map; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.ByteStream.Output; /* diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java index 600f922e57..2a821eaa01 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java @@ -20,12 +20,10 @@ import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; -import java.sql.Timestamp; import java.time.format.DateTimeFormatter; -import java.util.Date; import org.apache.hadoop.hive.common.type.HiveDecimal; -import org.apache.hadoop.hive.ql.util.TimestampUtils; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt; @@ -34,7 +32,6 @@ /** * TimestampWritable - * Writable equivalent of java.sq.Timestamp * * Timestamps are of the format * YYYY-MM-DD HH:MM:SS.[fff...] @@ -67,7 +64,7 @@ public static final DateTimeFormatter DATE_TIME_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); - private Timestamp timestamp = new Timestamp(0); + private Timestamp timestamp = new Timestamp(); /** * true if data is stored in timestamp field rather than byte arrays. @@ -113,20 +110,12 @@ public void set(byte[] bytes, int offset) { clearTimestamp(); } - public void setTime(long time) { - timestamp.setTime(time); - bytesEmpty = true; - timestampEmpty = false; - } - public void set(Timestamp t) { if (t == null) { - timestamp.setTime(0); - timestamp.setNanos(0); + timestamp.setLocalDateTime(null); return; } - timestamp.setTime(t.getTime()); - timestamp.setNanos(t.getNanos()); + timestamp.setLocalDateTime(t.getLocalDateTime()); bytesEmpty = true; timestampEmpty = false; } @@ -144,12 +133,10 @@ public void set(TimestampWritable t) { } public static void updateTimestamp(Timestamp timestamp, long secondsAsMillis, int nanos) { - ((Date) timestamp).setTime(secondsAsMillis); - timestamp.setNanos(nanos); + timestamp.setTimeInMillis(secondsAsMillis, nanos); } public void setInternal(long secondsAsMillis, int nanos) { - // This is our way of documenting that we are MUTATING the contents of // this writable's internal timestamp. 
updateTimestamp(timestamp, secondsAsMillis, nanos); @@ -173,7 +160,7 @@ public void writeToByteStream(RandomAccessOutput byteStream) { */ public long getSeconds() { if (!timestampEmpty) { - return TimestampUtils.millisToSeconds(timestamp.getTime()); + return timestamp.getSeconds(); } else if (!bytesEmpty) { return TimestampWritable.getSeconds(currentBytes, offset); } else { @@ -305,7 +292,7 @@ private void checkBytes() { public double getDouble() { double seconds, nanos; if (bytesEmpty) { - seconds = TimestampUtils.millisToSeconds(timestamp.getTime()); + seconds = timestamp.getSeconds(); nanos = timestamp.getNanos(); } else { seconds = getSeconds(); @@ -315,7 +302,7 @@ public double getDouble() { } public static long getLong(Timestamp timestamp) { - return timestamp.getTime() / 1000; + return timestamp.getSeconds(); } public void readFields(DataInput in) throws IOException { @@ -386,7 +373,17 @@ public String toString() { return timestamp.toString(); } - return timestamp.toLocalDateTime().format(DATE_TIME_FORMAT); + String timestampString = timestamp.toString(); + if (timestampString.length() > 19) { + if (timestampString.length() == 21) { + if (timestampString.substring(19).compareTo(".0") == 0) { + return timestamp.getLocalDateTime().format(DATE_TIME_FORMAT); + } + } + return timestamp.getLocalDateTime().format(DATE_TIME_FORMAT) + timestampString.substring(19); + } + + return timestamp.getLocalDateTime().format(DATE_TIME_FORMAT); } @Override @@ -400,8 +397,7 @@ public int hashCode() { private void populateTimestamp() { long seconds = getSeconds(); int nanos = getNanos(); - timestamp.setTime(seconds * 1000); - timestamp.setNanos(nanos); + timestamp.setTimeInSeconds(seconds, nanos); } /** Static methods **/ @@ -461,10 +457,9 @@ public static int getNanos(byte[] bytes, int offset) { */ public static void convertTimestampToBytes(Timestamp t, byte[] b, int offset) { - long millis = t.getTime(); + long seconds = t.getSeconds(); int nanos = t.getNanos(); - long seconds = TimestampUtils.millisToSeconds(millis); boolean hasSecondVInt = seconds < 0 || seconds > Integer.MAX_VALUE; boolean hasDecimal = setNanosBytes(nanos, b, offset+4, hasSecondVInt); @@ -522,34 +517,38 @@ public HiveDecimal getHiveDecimal() { public static HiveDecimal getHiveDecimal(Timestamp timestamp) { // The BigDecimal class recommends not converting directly from double to BigDecimal, // so we convert through a string... - Double timestampDouble = TimestampUtils.getDouble(timestamp); + long seconds = timestamp.getSeconds(); + Double timestampDouble = seconds + ((double) timestamp.getNanos()) / 1000000000; HiveDecimal result = HiveDecimal.create(timestampDouble.toString()); return result; } - /** * Converts the time in seconds or milliseconds to a timestamp. * @param time time in seconds or in milliseconds * @return the timestamp */ public static Timestamp longToTimestamp(long time, boolean intToTimestampInSeconds) { - // If the time is in seconds, converts it to milliseconds first. - return new Timestamp(intToTimestampInSeconds ? time * 1000 : time); + // If the time is in seconds, converts it to milliseconds first. 
+ if (intToTimestampInSeconds) { + return Timestamp.ofEpochSecond(time); + } + return Timestamp.ofEpochMilli(time); } public static void setTimestamp(Timestamp t, byte[] bytes, int offset) { long seconds = getSeconds(bytes, offset); - t.setTime(seconds * 1000); + int nanos; if (hasDecimalOrSecondVInt(bytes[offset])) { - t.setNanos(getNanos(bytes, offset + 4)); + nanos = getNanos(bytes, offset + 4); } else { - t.setNanos(0); + nanos = 0; } + t.setTimeInSeconds(seconds, nanos); } public static Timestamp createTimestamp(byte[] bytes, int offset) { - Timestamp t = new Timestamp(0); + Timestamp t = new Timestamp(); TimestampWritable.setTimestamp(t, bytes, offset); return t; } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java index c50cd40cd9..a9da2291db 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java @@ -20,8 +20,8 @@ import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; -import java.sql.Date; +import org.apache.hadoop.hive.common.type.Date; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.DateWritable; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java index ee801eedb6..730aecd016 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java @@ -20,10 +20,10 @@ import java.io.IOException; import java.io.OutputStream; import java.io.UnsupportedEncodingException; -import java.sql.Timestamp; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java index 17c0357da6..7faa4aec67 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java @@ -17,18 +17,18 @@ */ package org.apache.hadoop.hive.serde2.lazy; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java index dd88da889c..0274ff76ea 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java @@ 
-21,10 +21,10 @@ import java.io.IOException; import java.nio.charset.CharacterCodingException; import java.nio.charset.StandardCharsets; -import java.sql.Date; import java.util.Arrays; import java.util.List; +import org.apache.hadoop.hive.common.type.Date; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java index 356326cb41..c5b62db480 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java @@ -20,14 +20,13 @@ import java.io.IOException; import java.nio.ByteBuffer; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayDeque; import java.util.Deque; import java.util.List; import java.util.Map; import org.apache.commons.codec.binary.Base64; +import org.apache.hadoop.hive.common.type.Date; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveChar; @@ -35,6 +34,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.ByteStream.Output; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java index 3bc4ff71fd..22afc96ec1 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive; -import java.sql.Date; - +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.lazy.LazyDate; import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java index e0f993e145..dc8b687367 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive; -import java.sql.Timestamp; import java.util.List; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.LazyTimestamp; import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java 
b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java index cd4e619e3b..28d91e9a13 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java @@ -19,13 +19,12 @@ package org.apache.hadoop.hive.serde2.lazybinary.fast; import java.io.IOException; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayDeque; import java.util.Deque; import java.util.List; import java.util.Map; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.serde2.ByteStream; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; import org.slf4j.Logger; @@ -35,6 +34,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.ByteStream.Output; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java index 93a18f7337..2f8d1e5d3f 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java @@ -17,10 +17,9 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Date; - import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDateObjectInspector.java index bd86c2225d..7574645765 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDateObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDateObjectInspector.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Date; - +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java index 2453bc67cb..6bd61ed823 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Timestamp; - +import 
org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java index d93d71992d..5ff73c6d58 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Date; - +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; @@ -50,7 +49,7 @@ public Object set(Object o, Date value) { if (value == null) { return null; } - ((Date) o).setTime(value.getTime()); + ((Date) o).setTimeInDays(value.getDays()); return o; } @@ -58,12 +57,12 @@ public Object set(Object o, DateWritable d) { if (d == null) { return null; } - ((Date) o).setTime(d.get().getTime()); + ((Date) o).setTimeInDays(d.get().getDays()); return o; } public Object create(Date value) { - return new Date(value.getTime()); + return Date.ofEpochDay(value.getDays()); } } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java index 1e805ba974..7aebf6a217 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; @@ -45,9 +44,7 @@ public Object copyObject(Object o) { return null; } Timestamp source = (Timestamp) o; - Timestamp copy = new Timestamp(source.getTime()); - copy.setNanos(source.getNanos()); - return copy; + return new Timestamp(source.getLocalDateTime()); } public Timestamp get(Object o) { @@ -58,7 +55,7 @@ public Object set(Object o, Timestamp value) { if (value == null) { return null; } - ((Timestamp) o).setTime(value.getTime()); + ((Timestamp) o).setLocalDateTime(value.getLocalDateTime()); return o; } @@ -72,13 +69,12 @@ public Object set(Object o, TimestampWritable tw) { return null; } Timestamp t = (Timestamp) o; - t.setTime(tw.getTimestamp().getTime()); - t.setNanos(tw.getTimestamp().getNanos()); + t.setLocalDateTime(tw.getTimestamp().getLocalDateTime()); return t; } public Object create(Timestamp value) { - return new Timestamp(value.getTime()); + return new Timestamp(value.getLocalDateTime()); } public Object create(byte[] bytes, int offset) { diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java index ba20a2c56d..1e12ccaf3e 100644 --- 
a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java @@ -18,15 +18,15 @@ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Date; -import java.sql.Timestamp; import java.time.ZoneId; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.serde2.ByteStream; import org.apache.hadoop.hive.serde2.lazy.LazyInteger; @@ -255,7 +255,7 @@ public DateConverter(PrimitiveObjectInspector inputOI, SettableDateObjectInspector outputOI) { this.inputOI = inputOI; this.outputOI = outputOI; - r = outputOI.create(new Date(0)); + r = outputOI.create(new Date()); } public Object convert(Object input) { @@ -277,7 +277,7 @@ public TimestampConverter(PrimitiveObjectInspector inputOI, SettableTimestampObjectInspector outputOI) { this.inputOI = inputOI; this.outputOI = outputOI; - r = outputOI.create(new Timestamp(0)); + r = outputOI.create(new Timestamp()); } public void setIntToTimestampInSeconds(boolean intToTimestampInSeconds) { diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java index 8cf0744529..db4ec1d2aa 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java @@ -23,8 +23,6 @@ import java.io.IOException; import java.nio.charset.CharacterCodingException; import java.nio.charset.StandardCharsets; -import java.sql.Date; -import java.sql.Timestamp; import java.time.DateTimeException; import java.time.ZoneId; import java.util.HashMap; @@ -32,15 +30,16 @@ import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; -import org.apache.hadoop.hive.common.type.TimestampTZ; -import org.apache.hadoop.hive.common.type.TimestampTZUtil; -import org.apache.hadoop.hive.ql.util.TimestampUtils; -import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; -import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.apache.hadoop.hive.common.type.TimestampTZ; +import org.apache.hadoop.hive.common.type.TimestampTZUtil; +import org.apache.hadoop.hive.ql.util.TimestampUtils; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -51,6 +50,7 @@ import 
org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; +import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.LazyInteger; import org.apache.hadoop.hive.serde2.lazy.LazyLong; @@ -1126,7 +1126,7 @@ public static Date getDate(Object o, PrimitiveObjectInspector oi) { } catch (IllegalArgumentException e) { Timestamp ts = getTimestampFromString(s); if (ts != null) { - result = new Date(ts.getTime()); + result = Date.ofEpochMilli(ts.getMillis()); } else { result = null; } @@ -1140,7 +1140,7 @@ public static Date getDate(Object o, PrimitiveObjectInspector oi) { } catch (IllegalArgumentException e) { Timestamp ts = getTimestampFromString(val); if (ts != null) { - result = new Date(ts.getTime()); + result = Date.ofEpochMilli(ts.getMillis()); } else { result = null; } @@ -1212,8 +1212,8 @@ public static Timestamp getTimestamp(Object o, PrimitiveObjectInspector inputOI, result = TimestampUtils.doubleToTimestamp(((DoubleObjectInspector) inputOI).get(o)); break; case DECIMAL: - result = TimestampUtils.decimalToTimestamp(((HiveDecimalObjectInspector) inputOI) - .getPrimitiveJavaObject(o)); + result = TimestampUtils.decimalToTimestamp( + ((HiveDecimalObjectInspector) inputOI).getPrimitiveJavaObject(o)); break; case STRING: StringObjectInspector soi = (StringObjectInspector) inputOI; @@ -1225,8 +1225,8 @@ public static Timestamp getTimestamp(Object o, PrimitiveObjectInspector inputOI, result = getTimestampFromString(getString(o, inputOI)); break; case DATE: - result = new Timestamp( - ((DateObjectInspector) inputOI).getPrimitiveWritableObject(o).get().getTime()); + result = Timestamp.ofEpochMilli( + ((DateObjectInspector) inputOI).getPrimitiveWritableObject(o).get().getMillis()); break; case TIMESTAMP: result = ((TimestampObjectInspector) inputOI).getPrimitiveWritableObject(o).getTimestamp(); @@ -1247,7 +1247,7 @@ public static Timestamp getTimestamp(Object o, PrimitiveObjectInspector inputOI, return result; } - static Timestamp getTimestampFromString(String s) { + public static Timestamp getTimestampFromString(String s) { Timestamp result; s = s.trim(); s = trimNanoTimestamp(s); @@ -1261,7 +1261,7 @@ static Timestamp getTimestampFromString(String s) { } catch (IllegalArgumentException e) { // Let's try to parse it as timestamp with time zone and transform try { - result = Timestamp.from(TimestampTZUtil.parse(s).getZonedDateTime().toInstant()); + result = new Timestamp(TimestampTZUtil.parse(s).getZonedDateTime().toLocalDateTime()); } catch (DateTimeException e2) { result = null; } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java index 831411defe..8e81a17690 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Date; - +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.serde2.io.DateWritable; /** diff --git 
a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java index c676a62266..00f7642073 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.TimestampWritable; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java index f277232960..cabc4e8988 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java @@ -17,10 +17,9 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Timestamp; - import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java index 5d73806844..217d6c31cd 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Date; - +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java index 47b51f5da8..dc0fbc6871 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import java.sql.Timestamp; - +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java b/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java index 749d8accf7..c9856e89ef 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java @@ -18,19 +18,19 @@ 
package org.apache.hadoop.hive.serde2; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Random; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java b/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java index 22aadbb843..72a404479e 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java @@ -19,8 +19,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -29,11 +27,13 @@ import junit.framework.TestCase; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.fast.DeserializeRead; import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion; import org.apache.hadoop.hive.serde2.fast.SerializeWrite; diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java index c270d71470..cd34671fa3 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java @@ -17,18 +17,18 @@ */ package org.apache.hadoop.hive.serde2.binarysortable; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.List; import java.util.Random; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java index 82d126a428..29e7f3354c 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java +++ 
b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java @@ -17,26 +17,21 @@ */ package org.apache.hadoop.hive.serde2.binarysortable; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.List; import java.util.Random; -import junit.framework.TestCase; - -import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.hive.common.type.HiveBaseChar; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.common.type.RandomTypeUtil; -import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; @@ -46,12 +41,12 @@ import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; -import org.apache.hadoop.io.Writable; import org.apache.hive.common.util.DateUtils; +import junit.framework.TestCase; + // Just the primitive types. 
public class MyTestPrimitiveClass { diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java index 97eb967096..1768aac7d2 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java @@ -18,26 +18,32 @@ package org.apache.hadoop.hive.serde2.io; -import com.google.code.tempusfugit.concurrency.annotations.*; -import com.google.code.tempusfugit.concurrency.*; -import org.junit.*; +import com.google.code.tempusfugit.concurrency.ConcurrentRule; +import com.google.code.tempusfugit.concurrency.RepeatingRule; +import com.google.code.tempusfugit.concurrency.annotations.Concurrent; +import com.google.code.tempusfugit.concurrency.annotations.Repeating; +import org.apache.hadoop.hive.common.type.Date; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.*; -import java.io.*; -import java.sql.Date; -import java.text.DateFormat; -import java.text.SimpleDateFormat; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; +import java.io.DataOutput; +import java.io.DataOutputStream; import java.util.Calendar; -import java.util.GregorianCalendar; import java.util.LinkedList; import java.util.TimeZone; import java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import java.util.concurrent.Future; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; public class TestDateWritable { private static final Logger LOG = LoggerFactory.getLogger(TestDateWritable.class); @@ -104,7 +110,7 @@ public void testGettersSetters() { // Getters assertEquals(date1, dw1.get()); - assertEquals(date1.getTime() / 1000, dw1.getTimeInSeconds()); + assertEquals(date1.getSeconds(), dw1.getTimeInSeconds()); dw4.set(Date.valueOf("1970-01-02")); assertEquals(1, dw4.getDays()); @@ -151,12 +157,11 @@ public void testDateValueOf() { @BeforeClass public static void setupDateStrings() { - DateFormat format = new SimpleDateFormat("yyyy-MM-dd"); Date initialDate = Date.valueOf("2014-01-01"); Calendar cal = Calendar.getInstance(); - cal.setTime(initialDate); + cal.setTimeInMillis(initialDate.getMillis()); for (int idx = 0; idx < 365; ++idx) { - dateStrings[idx] = format.format(cal.getTime()); + dateStrings[idx] = Date.ofEpochMilli(cal.getTimeInMillis()).toString(); cal.add(1, Calendar.DAY_OF_YEAR); } } @@ -176,21 +181,20 @@ public DateTestCallable(LinkedList bad, String tz) { @Override public Void call() throws Exception { - SimpleDateFormat sdf = new SimpleDateFormat("YYYY-MM-dd HH:mm:ss"); // Iterate through each day of the year, make sure Date/DateWritable match Date originalDate = Date.valueOf("1900-01-01"); Calendar cal = Calendar.getInstance(); - cal.setTimeInMillis(originalDate.getTime()); + cal.setTimeInMillis(originalDate.getMillis()); for (int idx = 0; idx < 365*200; ++idx) { - originalDate = new Date(cal.getTimeInMillis()); + originalDate = Date.ofEpochMilli(cal.getTimeInMillis()); // Make sure originalDate is at midnight in the local time zone, // since DateWritable will generate dates at that time. 
originalDate = Date.valueOf(originalDate.toString()); DateWritable dateWritable = new DateWritable(originalDate); - Date actual = dateWritable.get(false); + Date actual = dateWritable.get(); if (!originalDate.equals(actual)) { - String originalStr = sdf.format(originalDate); - String actualStr = sdf.format(actual); + String originalStr = originalDate.toString(); + String actualStr = actual.toString(); if (originalStr.substring(0, 10).equals(actualStr.substring(0, 10))) continue; bad.add(new DtMismatch(originalStr, actualStr, tz)); } diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java index 3fe472ee8b..b12da28e94 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java @@ -26,7 +26,6 @@ import java.io.DataOutputStream; import java.io.IOException; import java.math.BigDecimal; -import java.sql.Timestamp; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; @@ -40,6 +39,7 @@ import static org.junit.Assert.*; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableUtils; @@ -68,10 +68,10 @@ protected DateFormat initialValue() { private static long getSeconds(Timestamp ts) { // To compute seconds, we first subtract the milliseconds stored in the nanos field of the // Timestamp from the result of getTime(). - long seconds = (ts.getTime() - ts.getNanos() / 1000000) / 1000; + long seconds = (ts.getMillis() - ts.getNanos() / 1000000) / 1000; // It should also be possible to calculate this based on ts.getTime() only. 
- assertEquals(seconds, TimestampUtils.millisToSeconds(ts.getTime())); + assertEquals(seconds, TimestampUtils.millisToSeconds(ts.getMillis())); return seconds; } @@ -181,9 +181,9 @@ private static TimestampWritable serializeDeserializeAndCheckTimestamp(Timestamp fromBinSort.setBinarySortable(binarySortableBytes, binarySortableOffset); assertTSWEquals(tsw, fromBinSort); - long timeSeconds = ts.getTime() / 1000; + long timeSeconds = ts.getSeconds(); if (0 <= timeSeconds && timeSeconds <= Integer.MAX_VALUE) { - assertEquals(new Timestamp(timeSeconds * 1000), + assertEquals(Timestamp.ofEpochSecond(timeSeconds), fromIntAndVInts((int) timeSeconds, 0).getTimestamp()); int nanos = reverseNanos(ts.getNanos()); @@ -288,7 +288,7 @@ public void testTimestampsWithinPositiveIntRange() throws IOException { Random rand = new Random(294722773L); for (int i = 0; i < 10000; ++i) { long millis = ((long) rand.nextInt(Integer.MAX_VALUE)) * 1000; - checkTimestampWithAndWithoutNanos(new Timestamp(millis), randomNanos(rand)); + checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(millis), randomNanos(rand)); } } @@ -306,7 +306,7 @@ public void testTimestampsOutsidePositiveIntRange() throws IOException { Random rand = new Random(789149717L); for (int i = 0; i < 10000; ++i) { long millis = randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand); - checkTimestampWithAndWithoutNanos(new Timestamp(millis), randomNanos(rand)); + checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(millis), randomNanos(rand)); } } @@ -315,7 +315,7 @@ public void testTimestampsOutsidePositiveIntRange() throws IOException { public void testTimestampsInFullRange() throws IOException { Random rand = new Random(2904974913L); for (int i = 0; i < 10000; ++i) { - checkTimestampWithAndWithoutNanos(new Timestamp(rand.nextLong()), randomNanos(rand)); + checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(rand.nextLong()), randomNanos(rand)); } } @@ -326,9 +326,8 @@ public void testToFromDouble() { for (int nanosPrecision = 0; nanosPrecision <= 4; ++nanosPrecision) { for (int i = 0; i < 10000; ++i) { long millis = randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand); - Timestamp ts = new Timestamp(millis); int nanos = randomNanos(rand, nanosPrecision); - ts.setNanos(nanos); + Timestamp ts = Timestamp.ofEpochMilli(millis, nanos); TimestampWritable tsw = new TimestampWritable(ts); double asDouble = tsw.getDouble(); int recoveredNanos = @@ -356,7 +355,7 @@ private static HiveDecimal timestampToDecimal(Timestamp ts) { public void testDecimalToTimestampRandomly() { Random rand = new Random(294729777L); for (int i = 0; i < 10000; ++i) { - Timestamp ts = new Timestamp( + Timestamp ts = Timestamp.ofEpochMilli( randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand)); ts.setNanos(randomNanos(rand, 9)); // full precision assertEquals(ts, TimestampUtils.decimalToTimestamp(timestampToDecimal(ts))); @@ -367,8 +366,8 @@ public void testDecimalToTimestampRandomly() { @Concurrent(count=4) @Repeating(repetition=100) public void testDecimalToTimestampCornerCases() { - Timestamp ts = new Timestamp(parseToMillis("1969-03-04 05:44:33")); - assertEquals(0, ts.getTime() % 1000); + Timestamp ts = Timestamp.ofEpochMilli(parseToMillis("1969-03-04 05:44:33")); + assertEquals(0, ts.getMillis() % 1000); for (int nanos : new int[] { 100000, 900000, 999100000, 999900000 }) { ts.setNanos(nanos); HiveDecimal d = timestampToDecimal(ts); @@ -473,8 +472,7 @@ public void testBinarySortable() { Random rand = new 
Random(5972977L); List tswList = new ArrayList(); for (int i = 0; i < 50; ++i) { - Timestamp ts = new Timestamp(rand.nextLong()); - ts.setNanos(randomNanos(rand)); + Timestamp ts = Timestamp.ofEpochMilli(rand.nextLong(), randomNanos(rand)); tswList.add(new TimestampWritable(ts)); } for (TimestampWritable tsw1 : tswList) { @@ -509,10 +507,10 @@ public void testSetTimestamp() { } private static void verifySetTimestamp(long time) { - Timestamp t1 = new Timestamp(time); + Timestamp t1 = Timestamp.ofEpochMilli(time); TimestampWritable writable = new TimestampWritable(t1); byte[] bytes = writable.getBytes(); - Timestamp t2 = new Timestamp(0); + Timestamp t2 = new Timestamp(); TimestampWritable.setTimestamp(t2, bytes, 0); assertEquals(t1, t2); } diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java index 6dcc6f8022..1d734eb14a 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java @@ -17,24 +17,23 @@ */ package org.apache.hadoop.hive.serde2.lazybinary; -import java.sql.Date; -import java.sql.Timestamp; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass; import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct; import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass; -import org.apache.hadoop.hive.serde2.binarysortable.TestBinarySortableSerDe; import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo; /** diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java index 732bd42368..118d155478 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java @@ -17,17 +17,16 @@ */ package org.apache.hadoop.hive.serde2.lazybinary; -import java.sql.Date; -import java.sql.Timestamp; import java.util.Random; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.common.type.RandomTypeUtil; -import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct; import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass; import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo; diff --git a/storage-api/pom.xml b/storage-api/pom.xml index 
80fa22ce63..d768f3f9bd 100644 --- a/storage-api/pom.xml +++ b/storage-api/pom.xml @@ -158,8 +158,8 @@ maven-compiler-plugin 3.1 - 1.7 - 1.7 + 1.8 + 1.8 diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/type/Date.java b/storage-api/src/java/org/apache/hadoop/hive/common/type/Date.java new file mode 100644 index 0000000000..08b0679fe8 --- /dev/null +++ b/storage-api/src/java/org/apache/hadoop/hive/common/type/Date.java @@ -0,0 +1,146 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.common.type; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.format.DateTimeParseException; + +/** + * This is the internal type for Date. + * The full qualified input format of Date is "yyyy-MM-dd". + */ +public class Date implements Comparable { + + private static final LocalDate EPOCH = LocalDate.of(1970, 1, 1); + private static final DateTimeFormatter FORMATTER; + static { + DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder(); + builder.append(DateTimeFormatter.ofPattern("yyyy-MM-dd")); + FORMATTER = builder.toFormatter(); + } + + private LocalDate localDate; + + public Date() { + this(EPOCH); + } + + public Date(LocalDate localDate) { + setLocalDate(localDate); + } + + public LocalDate getLocalDate() { + return localDate; + } + + public void setLocalDate(LocalDate localDate) { + this.localDate = localDate != null ? localDate : EPOCH; + } + + @Override + public String toString() { + return localDate.format(FORMATTER); + } + + public int hashCode() { + return localDate.hashCode(); + } + + @Override + public boolean equals(Object other) { + if (other instanceof Date) { + return compareTo((Date) other) == 0; + } + return false; + } + + @Override + public int compareTo(Date o) { + return localDate.compareTo(o.localDate); + } + + public int getDays() { + return (int) localDate.toEpochDay(); + } + + public long getSeconds() { + return localDate.atStartOfDay().toEpochSecond(ZoneOffset.UTC); + } + + public long getMillis() { + return localDate.atStartOfDay().toInstant(ZoneOffset.UTC).toEpochMilli(); + } + + public void setYear(int year) { + localDate = localDate.withYear(year); + } + + public void setMonth(int month) { + localDate = localDate.withMonth(month); + } + + public void setDayOfMonth(int dayOfMonth) { + localDate = localDate.withDayOfMonth(dayOfMonth); + } + + public void setTimeInDays(int epochDay) { + localDate = LocalDate.ofEpochDay(epochDay); + } + + public void setTimeInMillis(long epochMilli) { + localDate = LocalDateTime.ofInstant( + Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC).toLocalDate(); + } + + public static Date valueOf(String s) { + LocalDate localDate; + try { + localDate = LocalDate.parse(s, FORMATTER); + } catch (DateTimeParseException e) { + throw new IllegalArgumentException("Cannot create date, parsing error"); + } + return new Date(localDate); + } + + public static Date ofEpochDay(int epochDay) { + return new Date(LocalDate.ofEpochDay(epochDay)); + } + + public static Date ofEpochMilli(long epochMilli) { + return new Date(LocalDateTime.ofInstant( + Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC).toLocalDate()); + } + + public static Date of(int year, int month, int dayOfMonth) { + return new Date(LocalDate.of(year, month, dayOfMonth)); + } + + /** + * Return a copy of this object. + */ + public Object clone() { + // LocalDateTime is immutable. 
+ return new Date(this.localDate); + } + +} diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java b/storage-api/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java index ec1b11efbe..9d74c86d42 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java +++ b/storage-api/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.common.type; import java.sql.Date; -import java.sql.Timestamp; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/type/Timestamp.java b/storage-api/src/java/org/apache/hadoop/hive/common/type/Timestamp.java new file mode 100644 index 0000000000..beab6a98d9 --- /dev/null +++ b/storage-api/src/java/org/apache/hadoop/hive/common/type/Timestamp.java @@ -0,0 +1,177 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
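For orientation, a rough usage sketch of the Date type added above; it is not part of the patch, and the DateExample class, its main() wrapper, and the sample date literal are illustrative only:

import org.apache.hadoop.hive.common.type.Date;

public class DateExample {
  public static void main(String[] args) {
    // Parsing accepts only the "yyyy-MM-dd" form; there is no time-of-day or time zone.
    Date d = Date.valueOf("2019-03-01");

    // All conversions go through java.time.LocalDate in UTC, so the results do not
    // depend on the JVM's default time zone.
    int days = d.getDays();        // days since 1970-01-01
    long millis = d.getMillis();   // midnight UTC of that day

    // Both factory methods round-trip back to the same calendar day.
    boolean sameFromDays = Date.ofEpochDay(days).equals(d);
    boolean sameFromMillis = Date.ofEpochMilli(millis).equals(d);

    System.out.println(d + " days=" + days + " millis=" + millis
        + " roundTrips=" + (sameFromDays && sameFromMillis));
  }
}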
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.common.type; + +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.format.DateTimeParseException; +import java.time.temporal.ChronoField; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * This is the internal type for Timestamp. + * The full qualified input format of Timestamp is + * "yyyy-MM-dd HH:mm:ss[.SSS...]", where the time part is optional. + * If time part is absent, a default '00:00:00.0' will be used. + */ +public class Timestamp implements Comparable { + + private static final LocalDateTime EPOCH = LocalDateTime.of(1970, 1, 1, 0, 0, 0); + private static final Pattern SINGLE_DIGIT_PATTERN = Pattern.compile("[ ]\\d[:]\\d|[:]\\d[:]|[:]\\d($|[ ]|[.])"); + private static final DateTimeFormatter FORMATTER; + static { + DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder(); + // Date part + builder.append(DateTimeFormatter.ofPattern("yyyy-MM-dd")); + // Time part + builder.optionalStart(). + appendLiteral(" ").append(DateTimeFormatter.ofPattern("HH:mm:ss")). + optionalStart().appendFraction(ChronoField.NANO_OF_SECOND, 1, 9, true).optionalEnd() + .optionalEnd(); + FORMATTER = builder.toFormatter(); + } + + private LocalDateTime localDateTime; + + public Timestamp() { + this(EPOCH); + } + + public Timestamp(LocalDateTime localDateTime) { + setLocalDateTime(localDateTime); + } + + public LocalDateTime getLocalDateTime() { + return localDateTime; + } + + public void setLocalDateTime(LocalDateTime localDateTime) { + this.localDateTime = localDateTime != null ? 
localDateTime : EPOCH; + } + + @Override + public String toString() { + return localDateTime.format(FORMATTER); + } + + public int hashCode() { + return localDateTime.hashCode(); + } + + @Override + public boolean equals(Object other) { + if (other instanceof Timestamp) { + return compareTo((Timestamp) other) == 0; + } + return false; + } + + @Override + public int compareTo(Timestamp o) { + return localDateTime.compareTo(o.localDateTime); + } + + public long getSeconds() { + return localDateTime.toEpochSecond(ZoneOffset.UTC); + } + + public void setTimeInSeconds(long epochSecond) { + setTimeInSeconds(epochSecond, 0); + } + + public void setTimeInSeconds(long epochSecond, int nanos) { + localDateTime = LocalDateTime.ofEpochSecond( + epochSecond, nanos, ZoneOffset.UTC); + } + + public long getMillis() { + return localDateTime.toInstant(ZoneOffset.UTC).toEpochMilli(); + } + + public void setTimeInMillis(long epochMilli) { + localDateTime = LocalDateTime.ofInstant( + Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC); + } + + public void setTimeInMillis(long epochMilli, int nanos) { + localDateTime = LocalDateTime.ofInstant( + Instant.ofEpochMilli(epochMilli).plusNanos(nanos), ZoneOffset.UTC); + } + + public int getNanos() { + return localDateTime.getNano(); + } + + public static Timestamp valueOf(String s) { + // need to handle offset with single digital hour, see JDK-8066806 + s = handleSingleDigitTime(s); + LocalDateTime localDateTime; + try { + localDateTime = LocalDateTime.parse(s, FORMATTER); + } catch (DateTimeParseException e) { + throw new IllegalArgumentException("Cannot create timestamp, parsing error"); + } + return new Timestamp(localDateTime); + } + + private static String handleSingleDigitTime(String s) { + Matcher matcher = SINGLE_DIGIT_PATTERN.matcher(s); + while (matcher.find()) { + int index = matcher.start() + 1; + s = s.substring(0, index) + "0" + s.substring(index, s.length()); + matcher = SINGLE_DIGIT_PATTERN.matcher(s); + } + return s; + } + + public static Timestamp ofEpochSecond(long epochSecond) { + return ofEpochSecond(epochSecond, 0); + } + + public static Timestamp ofEpochSecond(long epochSecond, int nanos) { + return new Timestamp( + LocalDateTime.ofEpochSecond(epochSecond, nanos, ZoneOffset.UTC)); + } + + public static Timestamp ofEpochMilli(long epochMilli) { + return ofEpochMilli(epochMilli, 0); + } + + public static Timestamp ofEpochMilli(long epochMilli, int nanos) { + return new Timestamp( + LocalDateTime.ofInstant( + Instant.ofEpochMilli(epochMilli).plusNanos(nanos), ZoneOffset.UTC)); + } + + public void setNanos(int nanos) { + localDateTime = localDateTime.plusNanos(nanos); + } + + /** + * Return a copy of this object. + */ + public Object clone() { + // LocalDateTime is immutable. 
+ return new Timestamp(this.localDateTime); + } + +} diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java index a6f536933e..db27713935 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java +++ b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hive.ql.exec.vector; -import java.sql.Timestamp; import java.util.Arrays; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.io.Writable; /** @@ -72,7 +72,7 @@ public TimestampColumnVector(int len) { time = new long[len]; nanos = new int[len]; - scratchTimestamp = new Timestamp(0); + scratchTimestamp = new Timestamp(); scratchWritable = null; // Allocated by caller. } @@ -112,8 +112,7 @@ public int getNanos(int elementNum) { * @param elementNum */ public void timestampUpdate(Timestamp timestamp, int elementNum) { - timestamp.setTime(time[elementNum]); - timestamp.setNanos(nanos[elementNum]); + timestamp.setTimeInMillis(time[elementNum], nanos[elementNum]); } /** @@ -123,8 +122,7 @@ public void timestampUpdate(Timestamp timestamp, int elementNum) { * @return */ public Timestamp asScratchTimestamp(int elementNum) { - scratchTimestamp.setTime(time[elementNum]); - scratchTimestamp.setNanos(nanos[elementNum]); + scratchTimestamp.setTimeInMillis(time[elementNum], nanos[elementNum]); return scratchTimestamp; } @@ -142,8 +140,7 @@ public Timestamp getScratchTimestamp() { * @return */ public long getTimestampAsLong(int elementNum) { - scratchTimestamp.setTime(time[elementNum]); - scratchTimestamp.setNanos(nanos[elementNum]); + scratchTimestamp.setTimeInMillis(time[elementNum], nanos[elementNum]); return getTimestampAsLong(scratchTimestamp); } @@ -153,30 +150,17 @@ public long getTimestampAsLong(int elementNum) { * @return */ public static long getTimestampAsLong(Timestamp timestamp) { - return millisToSeconds(timestamp.getTime()); + return timestamp.getSeconds(); } // Copy of TimestampWritable.millisToSeconds - /** - * Rounds the number of milliseconds relative to the epoch down to the nearest whole number of - * seconds. 500 would round to 0, -500 would round to -1. - */ - private static long millisToSeconds(long millis) { - if (millis >= 0) { - return millis / 1000; - } else { - return (millis - 999) / 1000; - } - } - /** * Return a double representation of a Timestamp. * @param elementNum * @return */ public double getDouble(int elementNum) { - scratchTimestamp.setTime(time[elementNum]); - scratchTimestamp.setNanos(nanos[elementNum]); + scratchTimestamp.setTimeInMillis(time[elementNum], nanos[elementNum]); return getDouble(scratchTimestamp); } @@ -188,7 +172,7 @@ public double getDouble(int elementNum) { public static double getDouble(Timestamp timestamp) { // Same algorithm as TimestampWritable (not currently import-able here). 
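Similarly, a rough sketch of the Timestamp type defined above; the TimestampExample class and the sample values are illustrative only and not part of the patch:

import org.apache.hadoop.hive.common.type.Timestamp;

public class TimestampExample {
  public static void main(String[] args) {
    // Fractional seconds are optional and may carry up to nanosecond precision.
    Timestamp ts = Timestamp.valueOf("2019-03-01 10:20:30.123456789");
    System.out.println(ts.getSeconds() + "s + " + ts.getNanos() + "ns");

    // Single-digit hour/minute/second fields are zero-padded by handleSingleDigitTime
    // before parsing, so this should be accepted as 09:05:06.
    Timestamp padded = Timestamp.valueOf("2019-03-01 9:5:6");
    System.out.println(padded);

    // Factory methods interpret epoch values in UTC, independent of the JVM time zone.
    Timestamp back = Timestamp.ofEpochSecond(ts.getSeconds(), ts.getNanos());
    System.out.println(back.equals(ts));   // expected: true
  }
}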
double seconds, nanos; - seconds = millisToSeconds(timestamp.getTime()); + seconds = timestamp.getSeconds(); nanos = timestamp.getNanos(); return seconds + nanos / 1000000000; } @@ -337,7 +321,7 @@ public void set(int elementNum, Timestamp timestamp) { noNulls = false; return; } - this.time[elementNum] = timestamp.getTime(); + this.time[elementNum] = timestamp.getMillis(); this.nanos[elementNum] = timestamp.getNanos(); } @@ -351,7 +335,7 @@ public void set(int elementNum, Timestamp timestamp) { * @param elementNum */ public void setFromScratchTimestamp(int elementNum) { - this.time[elementNum] = scratchTimestamp.getTime(); + this.time[elementNum] = scratchTimestamp.getMillis(); this.nanos[elementNum] = scratchTimestamp.getNanos(); } @@ -456,7 +440,7 @@ public void copySelected( public void fill(Timestamp timestamp) { isRepeating = true; isNull[0] = false; - time[0] = timestamp.getTime(); + time[0] = timestamp.getMillis(); nanos[0] = timestamp.getNanos(); } @@ -483,8 +467,7 @@ public void stringifyValue(StringBuilder buffer, int row) { row = 0; } if (noNulls || !isNull[row]) { - scratchTimestamp.setTime(time[row]); - scratchTimestamp.setNanos(nanos[row]); + scratchTimestamp.setTimeInMillis(time[row], nanos[row]); buffer.append(scratchTimestamp.toString()); } else { buffer.append("null"); diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java b/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java index a087a4d493..35c81c4d55 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java +++ b/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java @@ -18,15 +18,15 @@ package org.apache.hadoop.hive.ql.util; +import java.math.BigDecimal; + import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveDecimalV1; +import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; -import java.math.BigDecimal; -import java.sql.Timestamp; - /** - * Utitilities for Timestamps and the relevant conversions. + * Utilities for Timestamps and the relevant conversions. 
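A brief sketch of how the reworked TimestampColumnVector above is driven by the new Timestamp type; the class name and sample value are ours, not part of the patch:

import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

public class TimestampVectorExample {
  public static void main(String[] args) {
    TimestampColumnVector col = new TimestampColumnVector(1024);

    // set() stores timestamp.getMillis() in time[] and timestamp.getNanos() in nanos[].
    Timestamp ts = Timestamp.valueOf("2019-03-01 10:20:30");
    col.set(0, ts);

    // Reads go through the reusable scratch Timestamp instead of allocating a new one.
    Timestamp scratch = col.asScratchTimestamp(0);
    System.out.println(scratch + " epochSeconds="
        + TimestampColumnVector.getTimestampAsLong(scratch));
  }
}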
*/ public class TimestampUtils { public static final BigDecimal BILLION_BIG_DECIMAL = BigDecimal.valueOf(1000000000); @@ -36,7 +36,7 @@ * @return double representation of the timestamp, accurate to nanoseconds */ public static double getDouble(Timestamp ts) { - long seconds = millisToSeconds(ts.getTime()); + long seconds = ts.getSeconds(); return seconds + ((double) ts.getNanos()) / 1000000000; } @@ -52,17 +52,7 @@ public static Timestamp doubleToTimestamp(double f) { bd = bd.subtract(new BigDecimal(seconds)).multiply(new BigDecimal(1000000000)); int nanos = bd.intValue(); - // Convert to millis - long millis = seconds * 1000; - if (nanos < 0) { - millis -= 1000; - nanos += 1000000000; - } - Timestamp t = new Timestamp(millis); - - // Set remaining fractional portion to nanos - t.setNanos(nanos); - return t; + return Timestamp.ofEpochSecond(seconds, nanos); } catch (NumberFormatException nfe) { return null; } catch (IllegalArgumentException iae) { @@ -99,9 +89,7 @@ public static Timestamp decimalToTimestamp(HiveDecimal dec) { return null; } long seconds = nanoInstant.longValue(); - Timestamp t = new Timestamp(seconds * 1000); - t.setNanos(nanos); - return t; + return Timestamp.ofEpochSecond(seconds, nanos); } /** @@ -142,9 +130,7 @@ public static Timestamp decimalToTimestamp( } long seconds = nanoInstant.longValue(); - Timestamp timestamp = new Timestamp(seconds * 1000L); - timestamp.setNanos(nanos); - return timestamp; + return Timestamp.ofEpochSecond(seconds, nanos); } public static Timestamp decimalToTimestamp(HiveDecimalV1 dec) { @@ -156,10 +142,8 @@ public static Timestamp decimalToTimestamp(HiveDecimalV1 dec) { } long seconds = nanoInstant.subtract(new BigDecimal(nanos)).divide(BILLION_BIG_DECIMAL).longValue(); - Timestamp t = new Timestamp(seconds * 1000); - t.setNanos(nanos); - return t; + return Timestamp.ofEpochSecond(seconds, nanos); } catch (NumberFormatException nfe) { return null; } catch (IllegalArgumentException iae) { diff --git a/storage-api/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java b/storage-api/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java index 6325d5d1db..b2a9e89da5 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java +++ b/storage-api/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java @@ -20,12 +20,8 @@ import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; -import java.sql.Date; -import java.util.Calendar; -import java.util.GregorianCalendar; -import java.util.TimeZone; -import java.util.concurrent.TimeUnit; +import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.io.WritableUtils; @@ -40,32 +36,7 @@ */ public class DateWritable implements WritableComparable { - private static final long MILLIS_PER_DAY = TimeUnit.DAYS.toMillis(1); - - // Local time zone. Store separately because Calendar would clone it. - // Java TimeZone has no mention of thread safety. Use thread local instance to be safe. 
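The TimestampUtils conversions above now assemble their results directly with Timestamp.ofEpochSecond(seconds, nanos); a rough round-trip sketch, with illustrative sample values of our choosing:

import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.util.TimestampUtils;

public class TimestampUtilsExample {
  public static void main(String[] args) {
    // 2009-01-01 00:00:00.5 UTC
    Timestamp ts = Timestamp.ofEpochSecond(1230768000L, 500000000);

    double asDouble = TimestampUtils.getDouble(ts);        // seconds + nanos / 1e9
    Timestamp back = TimestampUtils.doubleToTimestamp(asDouble);

    System.out.println(asDouble);                          // 1.2307680005E9
    System.out.println(back.equals(ts));                   // expected: true for this value
  }
}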
- private static final ThreadLocal LOCAL_TIMEZONE = new ThreadLocal() { - @Override - protected TimeZone initialValue() { - return Calendar.getInstance().getTimeZone(); - } - }; - - private static final ThreadLocal UTC_CALENDAR = new ThreadLocal() { - @Override - protected Calendar initialValue() { - return new GregorianCalendar(TimeZone.getTimeZone("UTC")); - } - }; - private static final ThreadLocal LOCAL_CALENDAR = new ThreadLocal() { - @Override - protected Calendar initialValue() { - return Calendar.getInstance(); - } - }; - - // Internal representation is an integer representing day offset from our epoch value 1970-01-01 - private int daysSinceEpoch = 0; + private Date date = new Date(); /* Constructors */ public DateWritable() { @@ -88,7 +59,7 @@ public DateWritable(int d) { * @param d integer value representing days since epoch date */ public void set(int d) { - daysSinceEpoch = d; + date = Date.ofEpochDay(d); } /** @@ -97,32 +68,26 @@ public void set(int d) { */ public void set(Date d) { if (d == null) { - daysSinceEpoch = 0; + date = new Date(); return; } - set(dateToDays(d)); + set(d.getDays()); } public void set(DateWritable d) { - set(d.daysSinceEpoch); + set(d.getDays()); } /** * @return Date value corresponding to the date in the local time zone */ public Date get() { - return get(true); - } - - // TODO: we should call this more often. In theory, for DATE type, time should never matter, but - // it's hard to tell w/some code paths like UDFs/OIs etc. that are used in many places. - public Date get(boolean doesTimeMatter) { - return new Date(daysToMillis(daysSinceEpoch, doesTimeMatter)); + return date; } public int getDays() { - return daysSinceEpoch; + return (int) date.getLocalDate().toEpochDay(); } /** @@ -130,78 +95,38 @@ public int getDays() { * @return time in seconds corresponding to this DateWritable */ public long getTimeInSeconds() { - return get().getTime() / 1000; - } - - public static Date timeToDate(long l) { - return new Date(l * 1000); - } - - public static long daysToMillis(int d) { - return daysToMillis(d, true); - } - - public static long daysToMillis(int d, boolean doesTimeMatter) { - // What we are trying to get is the equivalent of new Date(ymd).getTime() in the local tz, - // where ymd is whatever d represents. How it "works" is this. - // First we get the UTC midnight for that day (which always exists, a small island of sanity). - long utcMidnight = d * MILLIS_PER_DAY; - // Now we take a local TZ offset at midnight UTC. Say we are in -4; that means (surprise - // surprise) that at midnight UTC it was 20:00 in local. So far we are on firm ground. - long utcMidnightOffset = LOCAL_TIMEZONE.get().getOffset(utcMidnight); - // And now we wander straight into the swamp, when instead of adding, we subtract it from UTC - // midnight to supposedly get local midnight (in the above case, 4:00 UTC). Of course, given - // all the insane DST variations, where we actually end up is anyone's guess. - long hopefullyMidnight = utcMidnight - utcMidnightOffset; - // Then we determine the local TZ offset at that magical time. - long offsetAtHM = LOCAL_TIMEZONE.get().getOffset(hopefullyMidnight); - // If the offsets are the same, we assume our initial jump did not cross any DST boundaries, - // and is thus valid. Both times flowed at the same pace. We congratulate ourselves and bail. - if (utcMidnightOffset == offsetAtHM) return hopefullyMidnight; - // Alas, we crossed some DST boundary. 
If the time of day doesn't matter to the caller, we'll - // simply get the next day and go back half a day. This is not ideal but seems to work. - if (!doesTimeMatter) return daysToMillis(d + 1) - (MILLIS_PER_DAY >> 1); - // Now, we could get previous and next day, figure our how many hours were inserted or removed, - // and from which of the days, etc. But at this point our gun is pointing straight at our foot, - // so let's just go the safe, expensive way. - Calendar utc = UTC_CALENDAR.get(), local = LOCAL_CALENDAR.get(); - utc.setTimeInMillis(utcMidnight); - local.set(utc.get(Calendar.YEAR), utc.get(Calendar.MONTH), utc.get(Calendar.DAY_OF_MONTH)); - return local.getTimeInMillis(); - } - - public static int millisToDays(long millisLocal) { - // We assume millisLocal is midnight of some date. What we are basically trying to do - // here is go from local-midnight to UTC-midnight (or whatever time that happens to be). - long millisUtc = millisLocal + LOCAL_TIMEZONE.get().getOffset(millisLocal); - int days; - if (millisUtc >= 0L) { - days = (int) (millisUtc / MILLIS_PER_DAY); - } else { - days = (int) ((millisUtc - 86399999 /*(MILLIS_PER_DAY - 1)*/) / MILLIS_PER_DAY); - } - return days; + return date.getSeconds(); + } + + public static Date timeToDate(long seconds) { + return Date.ofEpochMilli(seconds * 1000); + } + + public static long daysToMillis(int days) { + return Date.ofEpochDay(days).getMillis(); + } + + public static int millisToDays(long millis) { + return Date.ofEpochMilli(millis).getDays(); } public static int dateToDays(Date d) { - // convert to equivalent time in UTC, then get day offset - long millisLocal = d.getTime(); - return millisToDays(millisLocal); + return (int) d.getDays(); } @Override public void readFields(DataInput in) throws IOException { - daysSinceEpoch = WritableUtils.readVInt(in); + date.setTimeInDays(WritableUtils.readVInt(in)); } @Override public void write(DataOutput out) throws IOException { - WritableUtils.writeVInt(out, daysSinceEpoch); + WritableUtils.writeVInt(out, (int) date.getDays()); } @Override public int compareTo(DateWritable d) { - return daysSinceEpoch - d.daysSinceEpoch; + return date.compareTo(d.date); } @Override @@ -214,12 +139,11 @@ public boolean equals(Object o) { @Override public String toString() { - // For toString, the time does not matter - return get(false).toString(); + return date.toString(); } @Override public int hashCode() { - return daysSinceEpoch; + return date.hashCode(); } } diff --git a/storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java b/storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java index a22a10bfd4..127b76f977 100644 --- a/storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java +++ b/storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java @@ -17,9 +17,10 @@ */ package org.apache.hadoop.hive.common.type; -import java.sql.Timestamp; -import java.util.Random; -import java.util.Arrays; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; @@ -27,16 +28,15 @@ import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; +import java.util.Arrays; +import java.util.Random; -import org.apache.hadoop.hive.serde2.io.HiveDecimalWritableV1; -import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; -import 
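Finally, a rough sketch of the slimmed-down DateWritable above, whose day/millisecond conversions are now plain UTC arithmetic on the new Date type rather than local-time-zone Calendar math; the class name and sample date are ours:

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.serde2.io.DateWritable;

public class DateWritableExample {
  public static void main(String[] args) {
    DateWritable w = new DateWritable();
    w.set(Date.valueOf("2019-03-01"));

    int days = w.getDays();                          // days since 1970-01-01
    long millis = DateWritable.daysToMillis(days);   // midnight UTC of that day

    System.out.println(w);                                           // 2019-03-01
    System.out.println(DateWritable.millisToDays(millis) == days);   // expected: true
  }
}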
org.apache.hadoop.hive.common.type.RandomTypeUtil; import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; import org.apache.hadoop.hive.ql.util.TimestampUtils; - -import org.junit.*; - -import static org.junit.Assert.*; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritableV1; +import org.junit.Assert; +import org.junit.Test; public class TestHiveDecimal extends HiveDecimalTestBase { diff --git a/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java b/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java index 6ffd6d1d81..3abe62d239 100644 --- a/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java +++ b/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java @@ -18,15 +18,16 @@ package org.apache.hadoop.hive.ql.exec.vector; -import org.junit.Test; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.sql.Timestamp; +import java.time.LocalDateTime; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import org.apache.hadoop.hive.common.type.Timestamp; +import org.junit.Test; /** * Test for StructColumnVector @@ -110,11 +111,11 @@ public void testStringify() throws IOException { batch.cols[0] = x; batch.cols[1] = y; batch.reset(); - Timestamp ts = Timestamp.valueOf("2000-01-01 00:00:00"); + Timestamp ts = new Timestamp(LocalDateTime.of(2000, 1, 1, 0, 0, 0)); for(int r=0; r < 10; ++r) { batch.size += 1; x1.vector[r] = 3 * r; - ts.setTime(ts.getTime() + 1000); + ts.setTimeInMillis(ts.getMillis() + 1000); x2.set(r, ts); byte[] buffer = ("value " + r).getBytes(StandardCharsets.UTF_8); y.setRef(r, buffer, 0, buffer.length); diff --git a/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampColumnVector.java b/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampColumnVector.java index dcbba7a9fc..0e13689d6e 100644 --- a/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampColumnVector.java +++ b/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampColumnVector.java @@ -23,11 +23,11 @@ import java.io.PrintWriter; import java.math.BigDecimal; import java.math.RoundingMode; -import java.sql.Timestamp; import java.util.Date; import java.util.Random; import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import org.apache.hadoop.hive.common.type.Timestamp; import static org.junit.Assert.*; diff --git a/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java b/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java index fbb89a958d..09d5ed73fd 100644 --- a/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java +++ b/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java @@ -1591,7 +1591,7 @@ private void generateFilterColumnBetweenDynamicValue(String[] tdesc) throws Exce // Special case - Date requires its own specific BetweenDynamicValue class, but derives from FilterLongColumnBetween typeName = "Long"; } else if (operandType.equals("timestamp")) { - defaultValue = "new Timestamp(0)"; + defaultValue = "new Timestamp()"; vectorType = "Timestamp"; getPrimitiveMethod = "getTimestamp"; getValueMethod = ""; @@ -3129,8 +3129,7 
@@ private void generateFilterDTIScalarCompareColumn(String[] tdesc) throws Excepti private String getDTIScalarColumnDisplayBody(String type) { if (type.equals("date")) { return - "Date dt = new Date(0);" + - " dt.setTime(DateWritable.daysToMillis((int) value));\n" + + "Date dt = Date.ofEpochMilli(DateWritable.daysToMillis((int) value));\n" + " return \"date \" + dt.toString() + \", \" + getColumnParamString(0, colNum);"; } else { return @@ -3141,8 +3140,7 @@ private String getDTIScalarColumnDisplayBody(String type) { private String getDTIColumnScalarDisplayBody(String type) { if (type.equals("date")) { return - "Date dt = new Date(0);" + - " dt.setTime(DateWritable.daysToMillis((int) value));\n" + + "Date dt = Date.ofEpochMilli(DateWritable.daysToMillis((int) value));\n" + " return getColumnParamString(0, colNum) + \", date \" + dt.toString();"; } else { return